diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 2bd3181c08f85..df42a71b9b7cb 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -29,9 +29,7 @@
 /airflow/ui/ @bbovenzi @pierrejeambrun @ryanahamilton @jscheffl
 
 # Security/Permissions
-/airflow/api_connexion/security.py @vincbeck
 /airflow/security/permissions.py @vincbeck
-/airflow/www/security.py @vincbeck
 
 # Calendar/Timetables
 /airflow/timetables/ @uranusjr
diff --git a/LICENSE b/LICENSE
index 405dcfe69d7c5..25c727ac6e927 100644
--- a/LICENSE
+++ b/LICENSE
@@ -220,7 +220,6 @@ at 3rd-party-licenses/LICENSE-[project].txt.
 (ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
 (ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
 (ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
-(ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
 
 ========================================================================
 MIT licenses
diff --git a/airflow/api_connexion/__init__.py b/airflow/api_connexion/__init__.py
deleted file mode 100644
index 13a83393a9124..0000000000000
--- a/airflow/api_connexion/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
diff --git a/airflow/api_connexion/schemas/__init__.py b/airflow/api_connexion/schemas/__init__.py
deleted file mode 100644
index 13a83393a9124..0000000000000
--- a/airflow/api_connexion/schemas/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
diff --git a/airflow/api_connexion/schemas/dag_schema.py b/airflow/api_connexion/schemas/dag_schema.py
deleted file mode 100644
index a164dec6f7e85..0000000000000
--- a/airflow/api_connexion/schemas/dag_schema.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from __future__ import annotations
-
-from itsdangerous import URLSafeSerializer
-from marshmallow import fields
-from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field
-
-from airflow.configuration import conf
-from airflow.models.dag import DagModel, DagTag
-
-
-class DagTagSchema(SQLAlchemySchema):
-    """Dag Tag schema."""
-
-    class Meta:
-        """Meta."""
-
-        model = DagTag
-
-    name = auto_field()
-
-
-class DAGSchema(SQLAlchemySchema):
-    """DAG schema."""
-
-    class Meta:
-        """Meta."""
-
-        model = DagModel
-
-    dag_id = auto_field(dump_only=True)
-    dag_display_name = fields.String(attribute="dag_display_name", dump_only=True)
-    bundle_name = auto_field(dump_only=True)
-    bundle_version = auto_field(dump_only=True)
-    is_paused = auto_field()
-    is_active = auto_field(dump_only=True)
-    last_parsed_time = auto_field(dump_only=True)
-    last_expired = auto_field(dump_only=True)
-    default_view = auto_field(dump_only=True)
-    fileloc = auto_field(dump_only=True)
-    file_token = fields.Method("get_token", dump_only=True)
-    owners = fields.Method("get_owners", dump_only=True)
-    description = auto_field(dump_only=True)
-    timetable_summary = auto_field(dump_only=True)
-    timetable_description = auto_field(dump_only=True)
-    tags = fields.List(fields.Nested(DagTagSchema), dump_only=True)
-    max_active_tasks = auto_field(dump_only=True)
-    max_active_runs = auto_field(dump_only=True)
-    max_consecutive_failed_dag_runs = auto_field(dump_only=True)
-    has_task_concurrency_limits = auto_field(dump_only=True)
-    has_import_errors = auto_field(dump_only=True)
-    next_dagrun = auto_field(dump_only=True)
-    next_dagrun_data_interval_start = auto_field(dump_only=True)
-    next_dagrun_data_interval_end = auto_field(dump_only=True)
-    next_dagrun_create_after = auto_field(dump_only=True)
-
-    @staticmethod
-    def get_owners(obj: DagModel):
-        """Convert owners attribute to DAG representation."""
-        if not getattr(obj, "owners", None):
-            return []
-        return obj.owners.split(",")
-
-    @staticmethod
-    def get_token(obj: DagModel):
-        """Return file token."""
-        serializer = URLSafeSerializer(conf.get_mandatory_value("webserver", "secret_key"))
-        return serializer.dumps(obj.fileloc)
-
-
-dag_schema = DAGSchema()
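The schema deleted above is re-created verbatim in `dag_command.py` below. For readers unfamiliar with the `SQLAlchemySchema`/`auto_field` pattern it relies on, here is a minimal self-contained sketch; `Pet` and `PetSchema` are illustrative stand-ins, not Airflow classes.

```python
# Minimal sketch of the marshmallow-sqlalchemy pattern used by DAGSchema:
# auto_field() derives a marshmallow field from the mapped SQLAlchemy column.
from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Pet(Base):
    __tablename__ = "pet"

    id = Column(Integer, primary_key=True)
    name = Column(String)


class PetSchema(SQLAlchemySchema):
    class Meta:
        model = Pet

    id = auto_field(dump_only=True)  # emitted on dump(), rejected on load()
    name = auto_field()


print(PetSchema().dump(Pet(id=1, name="rex")))  # {'id': 1, 'name': 'rex'}
```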
diff --git a/airflow/cli/commands/remote_commands/dag_command.py b/airflow/cli/commands/remote_commands/dag_command.py
index a2ec030a09fad..7f36217a27cd0 100644
--- a/airflow/cli/commands/remote_commands/dag_command.py
+++ b/airflow/cli/commands/remote_commands/dag_command.py
@@ -29,16 +29,19 @@
 import sys
 from typing import TYPE_CHECKING
 
+from itsdangerous import URLSafeSerializer
+from marshmallow import fields
+from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field
 from sqlalchemy import func, select
 
 from airflow.api.client import get_current_api_client
-from airflow.api_connexion.schemas.dag_schema import dag_schema
 from airflow.cli.simple_table import AirflowConsole
 from airflow.cli.utils import fetch_dag_run_from_run_id_or_logical_date_string
+from airflow.configuration import conf
 from airflow.dag_processing.bundles.manager import DagBundlesManager
 from airflow.exceptions import AirflowException
 from airflow.jobs.job import Job
-from airflow.models import DagBag, DagModel, DagRun, TaskInstance
+from airflow.models import DagBag, DagModel, DagRun, DagTag, TaskInstance
 from airflow.models.errors import ParseImportError
 from airflow.models.serialized_dag import SerializedDagModel
 from airflow.sdk.definitions._internal.dag_parsing_context import _airflow_parsing_context_manager
@@ -59,6 +62,66 @@
 
 log = logging.getLogger(__name__)
 
+
+# TODO: These two classes were moved here from the removed api_connexion package;
+# drop them once this command is migrated to FastAPI.
+class DagTagSchema(SQLAlchemySchema):
+    """Dag Tag schema."""
+
+    class Meta:
+        """Meta."""
+
+        model = DagTag
+
+    name = auto_field()
+
+
+class DAGSchema(SQLAlchemySchema):
+    """DAG schema."""
+
+    class Meta:
+        """Meta."""
+
+        model = DagModel
+
+    dag_id = auto_field(dump_only=True)
+    dag_display_name = fields.String(attribute="dag_display_name", dump_only=True)
+    bundle_name = auto_field(dump_only=True)
+    bundle_version = auto_field(dump_only=True)
+    is_paused = auto_field()
+    is_active = auto_field(dump_only=True)
+    last_parsed_time = auto_field(dump_only=True)
+    last_expired = auto_field(dump_only=True)
+    default_view = auto_field(dump_only=True)
+    fileloc = auto_field(dump_only=True)
+    file_token = fields.Method("get_token", dump_only=True)
+    owners = fields.Method("get_owners", dump_only=True)
+    description = auto_field(dump_only=True)
+    timetable_summary = auto_field(dump_only=True)
+    timetable_description = auto_field(dump_only=True)
+    tags = fields.List(fields.Nested(DagTagSchema), dump_only=True)
+    max_active_tasks = auto_field(dump_only=True)
+    max_active_runs = auto_field(dump_only=True)
+    max_consecutive_failed_dag_runs = auto_field(dump_only=True)
+    has_task_concurrency_limits = auto_field(dump_only=True)
+    has_import_errors = auto_field(dump_only=True)
+    next_dagrun = auto_field(dump_only=True)
+    next_dagrun_data_interval_start = auto_field(dump_only=True)
+    next_dagrun_data_interval_end = auto_field(dump_only=True)
+    next_dagrun_create_after = auto_field(dump_only=True)
+
+    @staticmethod
+    def get_owners(obj: DagModel):
+        """Convert owners attribute to DAG representation."""
+        if not getattr(obj, "owners", None):
+            return []
+        return obj.owners.split(",")
+
+    @staticmethod
+    def get_token(obj: DagModel):
+        """Return file token."""
+        serializer = URLSafeSerializer(conf.get_mandatory_value("webserver", "secret_key"))
+        return serializer.dumps(obj.fileloc)
+
+
 @cli_utils.action_cli
 @providers_configuration_loaded
 def dag_trigger(args) -> None:
@@ -329,15 +392,16 @@ def print_execution_interval(interval: DataInterval | None):
 def dag_list_dags(args, session: Session = NEW_SESSION) -> None:
     """Display dags with or without stats at the command line."""
     cols = args.columns if args.columns else []
-    invalid_cols = [c for c in cols if c not in dag_schema.fields]
-    valid_cols = [c for c in cols if c in dag_schema.fields]
+    dag_schema_fields = DAGSchema().fields
+    invalid_cols = [c for c in cols if c not in dag_schema_fields]
+    valid_cols = [c for c in cols if c in dag_schema_fields]
     if invalid_cols:
         from rich import print as rich_print
 
         rich_print(
             f"[red][bold]Error:[/bold] Ignoring the following invalid columns: {invalid_cols}. "
-            f"List of valid columns: {list(dag_schema.fields.keys())}",
+            f"List of valid columns: {list(dag_schema_fields.keys())}",
             file=sys.stderr,
         )
 
@@ -363,7 +427,7 @@ def dag_list_dags(args, session: Session = NEW_SESSION) -> None:
     def get_dag_detail(dag: DAG) -> dict:
         dag_model = DagModel.get_dagmodel(dag.dag_id, session=session)
         if dag_model:
-            dag_detail = dag_schema.dump(dag_model)
+            dag_detail = DAGSchema().dump(dag_model)
         else:
             dag_detail = _get_dagbag_dag_details(dag)
         return {col: dag_detail[col] for col in valid_cols}
@@ -395,7 +459,7 @@ def dag_details(args, session: Session = NEW_SESSION):
     dag = DagModel.get_dagmodel(args.dag_id, session=session)
     if not dag:
         raise SystemExit(f"DAG: {args.dag_id} does not exist in 'dag' table")
-    dag_detail = dag_schema.dump(dag)
+    dag_detail = DAGSchema().dump(dag)
 
     if args.output in ["table", "plain"]:
         data = [{"property_name": key, "property_value": value} for key, value in dag_detail.items()]
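The relocated `DAGSchema.get_token` is the only part of the moved code that touches `itsdangerous`. A round-trip sketch of what the file token actually is; `"placeholder-secret"` stands in for the configured `[webserver] secret_key`:

```python
# The token is a signed, URL-safe encoding of the DAG file path, not encryption;
# anyone holding the same secret key can recover and verify the original value.
from itsdangerous import URLSafeSerializer

serializer = URLSafeSerializer("placeholder-secret")
token = serializer.dumps("/opt/airflow/dags/example.py")
assert serializer.loads(token) == "/opt/airflow/dags/example.py"
```

The schema's field names also double as the vocabulary that `dag_list_dags` above validates `--columns` against.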
" - f"List of valid columns: {list(dag_schema.fields.keys())}", + f"List of valid columns: {list(dag_schema_fields.keys())}", file=sys.stderr, ) @@ -363,7 +427,7 @@ def dag_list_dags(args, session: Session = NEW_SESSION) -> None: def get_dag_detail(dag: DAG) -> dict: dag_model = DagModel.get_dagmodel(dag.dag_id, session=session) if dag_model: - dag_detail = dag_schema.dump(dag_model) + dag_detail = DAGSchema().dump(dag_model) else: dag_detail = _get_dagbag_dag_details(dag) return {col: dag_detail[col] for col in valid_cols} @@ -395,7 +459,7 @@ def dag_details(args, session: Session = NEW_SESSION): dag = DagModel.get_dagmodel(args.dag_id, session=session) if not dag: raise SystemExit(f"DAG: {args.dag_id} does not exist in 'dag' table") - dag_detail = dag_schema.dump(dag) + dag_detail = DAGSchema().dump(dag) if args.output in ["table", "plain"]: data = [{"property_name": key, "property_value": value} for key, value in dag_detail.items()] diff --git a/airflow/config_templates/config.yml b/airflow/config_templates/config.yml index d70e244b180f9..f4691a679496d 100644 --- a/airflow/config_templates/config.yml +++ b/airflow/config_templates/config.yml @@ -924,7 +924,7 @@ logging: consoles\. version_added: 2.0.0 type: string - example: "connexion,sqlalchemy" + example: "fastapi,sqlalchemy" default: "" worker_log_server_port: description: | diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst index f34002886b4e6..73ed4d5855882 100644 --- a/contributing-docs/testing/unit_tests.rst +++ b/contributing-docs/testing/unit_tests.rst @@ -95,7 +95,7 @@ test types you want to use in various ``breeze testing`` sub-commands in three w Those test types are defined: * ``Always`` - those are tests that should be always executed (always sub-folder) -* ``API`` - Tests for the Airflow API (api, api_connexion, api_internal, api_fastapi sub-folders) +* ``API`` - Tests for the Airflow API (api, api_internal, api_fastapi sub-folders) * ``CLI`` - Tests for the Airflow CLI (cli folder) * ``Core`` - for the core Airflow functionality (core, executors, jobs, models, ti_deps, utils sub-folders) * ``Operators`` - tests for the operators (operators folder) diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index 43140b1dd34de..35549a55b43ee 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -261,7 +261,7 @@ pipx install -e ./dev/breeze git reset --hard origin/v${VERSION_BRANCH}-test ``` -- Set your version in `airflow/__init__.py`, `airflow/api_connexion/openapi/v1.yaml` (without the RC tag). +- Set your version in `airflow/__init__.py` (without the RC tag). - Run `git commit` without a message to update versions in `docs`. - Add supported Airflow version to `./scripts/ci/pre_commit/supported_versions.py` and let pre-commit do the job again. - Replace the versions in `README.md` about installation and verify that installation instructions work fine. @@ -1039,7 +1039,7 @@ EOF This includes: - Modify `./scripts/ci/pre_commit/supported_versions.py` and let pre-commit do the job. -- For major/minor release, update version in `airflow/__init__.py`, `docs/docker-stack/` and `airflow/api_connexion/openapi/v1.yaml` to the next likely minor version release. +- For major/minor release, update version in `airflow/__init__.py` and `docs/docker-stack/` to the next likely minor version release. - Sync `RELEASE_NOTES.rst` (including deleting relevant `newsfragments`) and `README.md` changes. - Updating `Dockerfile` with the new version. 
diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst
index f34002886b4e6..73ed4d5855882 100644
--- a/contributing-docs/testing/unit_tests.rst
+++ b/contributing-docs/testing/unit_tests.rst
@@ -95,7 +95,7 @@ test types you want to use in various ``breeze testing`` sub-commands in three w
 Those test types are defined:
 
 * ``Always`` - those are tests that should be always executed (always sub-folder)
-* ``API`` - Tests for the Airflow API (api, api_connexion, api_internal, api_fastapi sub-folders)
+* ``API`` - Tests for the Airflow API (api, api_internal, api_fastapi sub-folders)
 * ``CLI`` - Tests for the Airflow CLI (cli folder)
 * ``Core`` - for the core Airflow functionality (core, executors, jobs, models, ti_deps, utils sub-folders)
 * ``Operators`` - tests for the operators (operators folder)
diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md
index 43140b1dd34de..35549a55b43ee 100644
--- a/dev/README_RELEASE_AIRFLOW.md
+++ b/dev/README_RELEASE_AIRFLOW.md
@@ -261,7 +261,7 @@ pipx install -e ./dev/breeze
 git reset --hard origin/v${VERSION_BRANCH}-test
 ```
 
-- Set your version in `airflow/__init__.py`, `airflow/api_connexion/openapi/v1.yaml` (without the RC tag).
+- Set your version in `airflow/__init__.py` (without the RC tag).
 - Run `git commit` without a message to update versions in `docs`.
 - Add supported Airflow version to `./scripts/ci/pre_commit/supported_versions.py` and let pre-commit do the job again.
 - Replace the versions in `README.md` about installation and verify that installation instructions work fine.
@@ -1039,7 +1039,7 @@ EOF
 This includes:
 
 - Modify `./scripts/ci/pre_commit/supported_versions.py` and let pre-commit do the job.
-- For major/minor release, update version in `airflow/__init__.py`, `docs/docker-stack/` and `airflow/api_connexion/openapi/v1.yaml` to the next likely minor version release.
+- For major/minor release, update version in `airflow/__init__.py` and `docs/docker-stack/` to the next likely minor version release.
 - Sync `RELEASE_NOTES.rst` (including deleting relevant `newsfragments`) and `README.md` changes.
 - Updating `Dockerfile` with the new version.
 - Updating `airflow_bug_report.yml` issue template in `.github/ISSUE_TEMPLATE/` with the new version.
@@ -1081,7 +1081,7 @@ Clients can be found here:
 ### API Clients versioning policy
 
 Clients and Core versioning are completely decoupled. Clients also follow SemVer and are updated when core introduce changes relevant to the clients.
-Most of the time, if the [openapi specification](https://github.com/apache/airflow/blob/main/airflow/api_connexion/openapi/v1.yaml) has
+Most of the time, if the [openapi specification](https://github.com/apache/airflow/blob/main/clients/python/openapi_v1.yaml) has
 changed, clients need to be released.
 
 To determine if you should release API clients, you can run from the airflow repository:
diff --git a/dev/README_RELEASE_PYTHON_CLIENT.md b/dev/README_RELEASE_PYTHON_CLIENT.md
index 77b2266affa2f..a6c6460d7fedc 100644
--- a/dev/README_RELEASE_PYTHON_CLIENT.md
+++ b/dev/README_RELEASE_PYTHON_CLIENT.md
@@ -92,7 +92,7 @@ echo "${VERSION}" > clients/python/version.txt
 
 ```shell script
 cd ${AIRFLOW_REPO_ROOT}
-git log 2.8.0..HEAD --pretty=oneline -- airflow/api_connexion/openapi/v1.yaml
+git log 2.8.0..HEAD --pretty=oneline -- clients/python/openapi_v1.yaml
 ```
 
 - Update CHANGELOG.md with the details.
diff --git a/dev/airflow-github b/dev/airflow-github
index 2d1567948d31d..0963c4a2224c0 100755
--- a/dev/airflow-github
+++ b/dev/airflow-github
@@ -408,7 +408,7 @@ def api_clients_policy(previous_version, target_version):
         repo,
         previous_version,
         target_version,
-        files=[f"{repo.working_dir}/airflow/api_connexion/openapi/v1.yaml"],
+        files=[f"{repo.working_dir}/clients/python/openapi_v1.yaml"],
     )
 
     clients_need_release = False
diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py
index 7d7be448088d3..006d56fedc1a4 100644
--- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py
+++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py
@@ -80,8 +80,6 @@
 FULL_TESTS_NEEDED_LABEL = "full tests needed"
 INCLUDE_SUCCESS_OUTPUTS_LABEL = "include success outputs"
 LATEST_VERSIONS_ONLY_LABEL = "latest versions only"
-LEGACY_UI_LABEL = "legacy ui"
-LEGACY_API_LABEL = "legacy api"
 LOG_WITHOUT_MOCK_IN_TESTS_EXCEPTION_LABEL = "log exception"
 NON_COMMITTER_BUILD_LABEL = "non committer build"
 UPGRADE_TO_NEWER_DEPENDENCIES_LABEL = "upgrade to newer dependencies"
@@ -102,7 +100,6 @@ class FileGroupForCi(Enum):
     ALWAYS_TESTS_FILES = "always_test_files"
     API_FILES = "api_files"
     API_CODEGEN_FILES = "api_codegen_files"
-    LEGACY_API_FILES = "legacy_api_files"
     HELM_FILES = "helm_files"
     DEPENDENCY_FILES = "dependency_files"
     DOC_FILES = "doc_files"
@@ -171,9 +168,6 @@ def __hash__(self):
             r"^airflow/api_fastapi/core_api/openapi/v1-generated\.yaml",
             r"^clients/gen",
         ],
-        FileGroupForCi.LEGACY_API_FILES: [
-            r"^airflow/api_connexion/",
-        ],
         FileGroupForCi.HELM_FILES: [
             r"^chart",
             r"^airflow/kubernetes",
@@ -288,10 +282,8 @@ def __hash__(self):
     {
         SelectiveCoreTestType.API: [
             r"^airflow/api/",
-            r"^airflow/api_connexion/",
             r"^airflow/api_fastapi/",
             r"^tests/api/",
-            r"^tests/api_connexion/",
             r"^tests/api_fastapi/",
         ],
         SelectiveCoreTestType.CLI: [
@@ -1510,30 +1502,6 @@ def _is_canary_run(self):
             and self._github_repository == APACHE_AIRFLOW_GITHUB_REPOSITORY
         ) or CANARY_LABEL in self._pr_labels
 
-    @cached_property
-    def is_legacy_ui_api_labeled(self) -> bool:
-        # Selective check for legacy UI/API updates.
-        # It is to ping the maintainer to add the label and make them aware of the changes.
-        if self._is_canary_run() or self._github_event not in (
-            GithubEvents.PULL_REQUEST,
-            GithubEvents.PULL_REQUEST_TARGET,
-        ):
-            return False
-
-        if (
-            self._matching_files(
-                FileGroupForCi.LEGACY_API_FILES, CI_FILE_GROUP_MATCHES, CI_FILE_GROUP_EXCLUDES
-            )
-            and LEGACY_API_LABEL not in self._pr_labels
-        ):
-            get_console().print(
-                f"[error]Please ask maintainer to assign "
-                f"the '{LEGACY_API_LABEL}' label to the PR in order to continue"
-            )
-            sys.exit(1)
-        else:
-            return True
-
     @classmethod
     def _find_caplog_in_def(cls, added_lines):
         """
diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py
index 250434548a9af..419e3394982d1 100644
--- a/dev/breeze/tests/test_selective_checks.py
+++ b/dev/breeze/tests/test_selective_checks.py
@@ -51,10 +51,6 @@
 # commit that is neutral - allows to keep pyproject.toml-changing PRS neutral for unit tests
 NEUTRAL_COMMIT = "938f0c1f3cc4cbe867123ee8aa9f290f9f18100a"
 
-# for is_legacy_ui_api_labeled tests
-LEGACY_UI_LABEL = "legacy ui"
-LEGACY_API_LABEL = "legacy api"
-
 # Use me if you are adding test for the changed files that includes caplog
 LOG_WITHOUT_MOCK_IN_TESTS_EXCEPTION_LABEL = "log exception"
diff --git a/generated/dep_tree.txt b/generated/dep_tree.txt
index a2c2f59f5b848..2d117e3741672 100644
--- a/generated/dep_tree.txt
+++ b/generated/dep_tree.txt
@@ -27,55 +27,6 @@ apache-airflow v3.0.0.dev0
 ├── blinker v1.8.2
 ├── colorlog v6.8.2
 ├── configupdater v3.2
-├── connexion[flask] v2.14.2
-│   ├── clickclick v20.10.2
-│   │   ├── click v8.1.7
-│   │   └── pyyaml v6.0.2
-│   ├── flask v2.2.5
-│   │   ├── click v8.1.7
-│   │   ├── importlib-metadata v8.4.0
-│   │   │   └── zipp v3.20.1
-│   │   ├── itsdangerous v2.2.0
-│   │   ├── jinja2 v3.1.4
-│   │   │   └── markupsafe v2.1.5
-│   │   └── werkzeug v2.2.3
-│   │       └── markupsafe v2.1.5
-│   ├── inflection v0.5.1
-│   ├── itsdangerous v2.2.0
-│   ├── jsonschema v4.23.0
-│   │   ├── attrs v24.2.0
-│   │   ├── importlib-resources v6.4.4
-│   │   │   └── zipp v3.20.1
-│   │   ├── jsonschema-specifications v2023.12.1
-│   │   │   ├── importlib-resources v6.4.4
-│   │   │   │   └── zipp v3.20.1
-│   │   │   └── referencing v0.35.1
-│   │   │       ├── attrs v24.2.0
-│   │   │       └── rpds-py v0.20.0
-│   │   ├── pkgutil-resolve-name v1.3.10
-│   │   ├── referencing v0.35.1
-│   │   │   ├── attrs v24.2.0
-│   │   │   └── rpds-py v0.20.0
-│   │   └── rpds-py v0.20.0
-│   ├── packaging v24.1
-│   ├── pyyaml v6.0.2
-│   ├── requests v2.32.3
-│   │   ├── certifi v2024.8.30
-│   │   ├── charset-normalizer v3.3.2
-│   │   ├── idna v3.8
-│   │   └── urllib3 v2.2.2
-│   ├── werkzeug v2.2.3
-│   │   └── markupsafe v2.1.5
-│   ├── flask v2.2.5 (extra: flask)
-│   │   ├── click v8.1.7
-│   │   ├── importlib-metadata v8.4.0
-│   │   │   └── zipp v3.20.1
-│   │   ├── itsdangerous v2.2.0
-│   │   ├── jinja2 v3.1.4
-│   │   │   └── markupsafe v2.1.5
-│   │   └── werkzeug v2.2.3
-│   │       └── markupsafe v2.1.5
-│   └── itsdangerous v2.2.0 (extra: flask)
 ├── cron-descriptor v1.4.5
 ├── croniter v3.0.3
 │   ├── python-dateutil v2.9.0.post0
diff --git a/generated/dependency_depth.json b/generated/dependency_depth.json
index 593e791ae0925..7d1c40327e784 100644
--- a/generated/dependency_depth.json
+++ b/generated/dependency_depth.json
@@ -23,7 +23,6 @@
     "blinker": 1,
     "colorlog": 1,
     "configupdater": 1,
-    "connexion[flask]": 1,
     "clickclick": 2,
     "click": 3,
     "pyyaml": 3,
diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json
index ae9fa64574206..44e1a4e70ea09 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -591,6 +591,7 @@
     "deps": [
       "apache-airflow-providers-common-compat>=1.2.1",
       "apache-airflow>=3.0.0.dev0",
+      "connexion[flask]>=2.14.2,<3.0",
      "flask-appbuilder==4.5.3",
       "flask-login>=0.6.2",
       "flask>=2.2,<2.3",
+ "connexion[flask]>=2.14.2,<3.0", "flask-appbuilder==4.5.3", "flask-login>=0.6.2", "flask>=2.2,<2.3", diff --git a/hatch_build.py b/hatch_build.py index 0a0e784f08b73..5bdf7ae9718c1 100644 --- a/hatch_build.py +++ b/hatch_build.py @@ -197,13 +197,6 @@ "blinker>=1.6.2", "colorlog>=6.8.2", "configupdater>=3.1.1", - # `airflow/www/extensions/init_views` imports `connexion.decorators.validation.RequestBodyValidator` - # connexion v3 has refactored the entire module to middleware, see: /spec-first/connexion/issues/1525 - # Specifically, RequestBodyValidator was removed in: /spec-first/connexion/pull/1595 - # The usage was added in #30596, seemingly only to override and improve the default error message. - # Either revert that change or find another way, preferably without using connexion internals. - # This limit can be removed after https://github.com/apache/airflow/issues/35234 is fixed - "connexion[flask]>=2.14.2,<3.0", "cron-descriptor>=1.2.24", "croniter>=2.0.2", "cryptography>=41.0.0", @@ -279,8 +272,8 @@ # Does not work with it Tracked in https://github.com/fsspec/universal_pathlib/issues/276 "universal-pathlib>=0.2.2,!=0.2.4", "uuid6>=2024.7.10", - # Werkzug 3 breaks Flask-Login 0.6.2, also connexion needs to be updated to >= 3.0 - # we should remove this limitation when FAB supports Flask 2.3 and we migrate connexion to 3+ + # Werkzug 3 breaks Flask-Login 0.6.2 + # we should remove this limitation when FAB supports Flask 2.3 "werkzeug>=2.0,<3", ] diff --git a/providers/fab/README.rst b/providers/fab/README.rst index 772dce9a8f322..859bd8fc8bafd 100644 --- a/providers/fab/README.rst +++ b/providers/fab/README.rst @@ -58,6 +58,7 @@ PIP package Version required ``flask`` ``>=2.2,<2.3`` ``flask-appbuilder`` ``==4.5.3`` ``flask-login`` ``>=0.6.2`` +``connexion[flask]`` ``>=2.14.2,<3.0`` ``jmespath`` ``>=0.7.0`` ========================================== ================== diff --git a/providers/fab/pyproject.toml b/providers/fab/pyproject.toml index bf693eb033c12..9f5cde6778393 100644 --- a/providers/fab/pyproject.toml +++ b/providers/fab/pyproject.toml @@ -67,6 +67,7 @@ dependencies = [ # In particular, make sure any breaking changes, for example any new methods, are accounted for. 
"flask-appbuilder==4.5.3", "flask-login>=0.6.2", + "connexion[flask]>=2.14.2,<3.0", "jmespath>=0.7.0", ] diff --git a/providers/fab/src/airflow/providers/fab/get_provider_info.py b/providers/fab/src/airflow/providers/fab/get_provider_info.py index efe4452fe2ec4..fa183c9aa293b 100644 --- a/providers/fab/src/airflow/providers/fab/get_provider_info.py +++ b/providers/fab/src/airflow/providers/fab/get_provider_info.py @@ -82,6 +82,7 @@ def get_provider_info(): "flask>=2.2,<2.3", "flask-appbuilder==4.5.3", "flask-login>=0.6.2", + "connexion[flask]>=2.14.2,<3.0", "jmespath>=0.7.0", ], "optional-dependencies": {"kerberos": ["kerberos>=1.3.0"]}, diff --git a/pyproject.toml b/pyproject.toml index a6bb054bd3d2c..c8a3c37b2e397 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -506,12 +506,6 @@ filterwarnings = [ "ignore::DeprecationWarning:flask_sqlalchemy", # https://github.com/dpgaspar/Flask-AppBuilder/pull/1903 "ignore::DeprecationWarning:apispec.utils", - # Connexion 2 use different deprecated objects, this should be resolved into Connexion 3 - # https://github.com/spec-first/connexion/pull/1536 - 'ignore::DeprecationWarning:connexion.spec', - 'ignore:jsonschema\.RefResolver:DeprecationWarning:connexion.json_schema', - 'ignore:jsonschema\.exceptions\.RefResolutionError:DeprecationWarning:connexion.json_schema', - 'ignore:Accessing jsonschema\.draft4_format_checker:DeprecationWarning:connexion.decorators.validation', ] # We cannot add warnings from the airflow package into `filterwarnings`, # because it invokes import airflow before we set up test environment which breaks the tests. diff --git a/scripts/ci/pre_commit/check_tests_in_right_folders.py b/scripts/ci/pre_commit/check_tests_in_right_folders.py index a04400e1c0cb4..48665d1f90148 100755 --- a/scripts/ci/pre_commit/check_tests_in_right_folders.py +++ b/scripts/ci/pre_commit/check_tests_in_right_folders.py @@ -31,7 +31,6 @@ "_internals", "always", "api", - "api_connexion", "api_internal", "api_fastapi", "assets", diff --git a/scripts/cov/restapi_coverage.py b/scripts/cov/restapi_coverage.py index 52728c4b7d8b7..f87ce9880c5f7 100644 --- a/scripts/cov/restapi_coverage.py +++ b/scripts/cov/restapi_coverage.py @@ -23,16 +23,9 @@ sys.path.insert(0, str(Path(__file__).parent.resolve())) -source_files = ["airflow/api_connexion"] +source_files = ["airflow/api_fastapi"] -files_not_fully_covered = [ - "airflow/api_connexion/endpoints/forward_to_fab_endpoint.py", - "airflow/api_connexion/endpoints/task_instance_endpoint.py", - "airflow/api_connexion/exceptions.py", - "airflow/api_connexion/schemas/common_schema.py", - "airflow/api_connexion/security.py", - "airflow/api_connexion/types.py", -] +files_not_fully_covered: list[str] = [] if __name__ == "__main__": args = ["-qq"] + source_files diff --git a/tests/cli/commands/remote_commands/test_dag_command.py b/tests/cli/commands/remote_commands/test_dag_command.py index 5a5e16e0504bd..c509aa009fc50 100644 --- a/tests/cli/commands/remote_commands/test_dag_command.py +++ b/tests/cli/commands/remote_commands/test_dag_command.py @@ -32,7 +32,6 @@ from sqlalchemy import select from airflow import settings -from airflow.api_connexion.schemas.dag_schema import DAGSchema, dag_schema from airflow.cli import cli_parser from airflow.cli.commands.remote_commands import dag_command from airflow.decorators import task @@ -237,7 +236,7 @@ def test_cli_get_dag_details(self): dag_command.dag_details(args) out = temp_stdout.getvalue() - dag_detail_fields = DAGSchema().fields.keys() + dag_detail_fields = 
 
         # Check if DAG Details field are present
         for field in dag_detail_fields:
@@ -311,7 +310,7 @@ def test_list_dags_none_get_dagmodel(self, mock_get_dagmodel):
 
     @conf_vars({("core", "load_examples"): "true"})
     def test_dagbag_dag_col(self):
-        valid_cols = [c for c in dag_schema.fields]
+        valid_cols = [c for c in dag_command.DAGSchema().fields]
         dagbag = DagBag(include_examples=True)
         dag_details = dag_command._get_dagbag_dag_details(dagbag.get_dag("tutorial_dag"))
         assert list(dag_details.keys()) == valid_cols
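After this change the schema is reachable only through `dag_command`, which is exactly how the updated tests consume it. A minimal sketch of the new import path; it assumes an installed, configured Airflow, since importing the CLI module initializes settings:

```python
# Assumes a working Airflow installation; the printed names are also the valid
# values for `airflow dags list --columns`, as enforced in dag_list_dags.
from airflow.cli.commands.remote_commands import dag_command

print(sorted(dag_command.DAGSchema().fields))
```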