diff --git a/.github/actions/install-pre-commit/action.yml b/.github/actions/install-pre-commit/action.yml index 9f4b091fddcea..3d3196c31b5be 100644 --- a/.github/actions/install-pre-commit/action.yml +++ b/.github/actions/install-pre-commit/action.yml @@ -21,7 +21,7 @@ description: 'Installs pre-commit and related packages' inputs: python-version: description: 'Python version to use' - default: "3.9" + default: "3.10" uv-version: description: 'uv version to use' default: "0.7.16" # Keep this comment to allow automatic replacement of uv version diff --git a/.github/actions/prepare_all_ci_images/action.yml b/.github/actions/prepare_all_ci_images/action.yml index 7b461f4e3e2db..f245d5081d5d8 100644 --- a/.github/actions/prepare_all_ci_images/action.yml +++ b/.github/actions/prepare_all_ci_images/action.yml @@ -36,6 +36,7 @@ runs: # this should be implemented in stash action as list of keys to download. # That includes 3.9 - 3.12 as we are backporting it to v3-0-test branch # This is captured in https://github.com/apache/airflow/issues/45268 + # So we actually need 3.9 even if 3.9 support on main is dropped! - name: "Restore CI docker image ${{ inputs.platform }}:3.9" uses: ./.github/actions/prepare_single_ci_image with: diff --git a/.github/workflows/publish-docs-to-s3.yml b/.github/workflows/publish-docs-to-s3.yml index 6139d33182e50..7d93753146458 100644 --- a/.github/workflows/publish-docs-to-s3.yml +++ b/.github/workflows/publish-docs-to-s3.yml @@ -118,7 +118,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} INCLUDE_SUCCESS_OUTPUTS: false - PYTHON_MAJOR_MINOR_VERSION: 3.9 + PYTHON_MAJOR_MINOR_VERSION: 3.10 VERBOSE: "true" EXTRA_BUILD_OPTIONS: ${{ needs.build-info.outputs.extra-build-options }} steps: @@ -187,7 +187,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} INCLUDE_SUCCESS_OUTPUTS: false - PYTHON_MAJOR_MINOR_VERSION: 3.9 + PYTHON_MAJOR_MINOR_VERSION: 3.10 VERBOSE: "true" steps: - name: "Cleanup repo" diff --git a/.github/workflows/release_dockerhub_image.yml b/.github/workflows/release_dockerhub_image.yml index 2499e521d74a3..1ec9ec8f91be4 100644 --- a/.github/workflows/release_dockerhub_image.yml +++ b/.github/workflows/release_dockerhub_image.yml @@ -29,7 +29,7 @@ on: # yamllint disable-line rule:truthy default: false limitPythonVersions: type: string - description: 'Force python versions (e.g. "3.9 3.10")' + description: 'Force python versions (e.g. "3.10 3.11")' default: '' permissions: contents: read diff --git a/.github/workflows/release_single_dockerhub_image.yml b/.github/workflows/release_single_dockerhub_image.yml index fd572adbabab1..b308de93e72ee 100644 --- a/.github/workflows/release_single_dockerhub_image.yml +++ b/.github/workflows/release_single_dockerhub_image.yml @@ -29,7 +29,7 @@ on: # yamllint disable-line rule:truthy type: string required: true pythonVersion: - description: 'Python version (e.g. 3.8, 3.9, 3.10, 3.11)' + description: 'Python version (e.g. 
3.10, 3.11)' type: string required: true skipLatest: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 265f52fd1c2ba..275daa7bdd6e5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -237,8 +237,7 @@ repos: entry: ./scripts/ci/pre_commit/check_deferrable_default.py pass_filenames: false # libcst doesn't have source wheels for all PY except PY3.12, excluding it - # libcst 1.8.1 doesn't include typing-extensions which is needed for Python 3.9 - additional_dependencies: ['libcst>=1.1.0,!=1.8.0,!=1.8.1'] + additional_dependencies: ['libcst>=1.8.1'] files: ^(providers/.*/)?airflow/.*/(sensors|operators)/.*\.py$ - repo: https://github.com/asottile/blacken-docs rev: 1.19.1 hooks: - id: blacken-docs name: Run black on docs args: - --line-length=110 - - --target-version=py39 - --target-version=py310 - --target-version=py311 - --target-version=py312 diff --git a/.readthedocs.yml b/.readthedocs.yml index c276d282294ca..ddc2ffd3681fe 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -20,7 +20,7 @@ formats: [] sphinx: configuration: devel-common/src/docs/rtd-deprecation/conf.py python: - version: "3.9" + version: "3.10" install: - method: pip path: . diff --git a/AGENTS.md b/AGENTS.md index c49c534016767..724896d268710 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -34,7 +34,7 @@ how to set up the environment, run checks, build docs and follow the PR workflow ## Running tests -- [`03_contributors_quick_start.rst`](contributing-docs/03_contributors_quick_start.rst) shows running tests inside Breeze. Use `pytest` inside the container for individual files or invoke `breeze testing` commands to run full suites, e.g. `breeze --backend postgres --python 3.9 testing tests --test-type All`. +- [`03_contributors_quick_start.rst`](contributing-docs/03_contributors_quick_start.rst) shows running tests inside Breeze. Use `pytest` inside the container for individual files or invoke `breeze testing` commands to run full suites, e.g. `breeze --backend postgres --python 3.10 testing tests --test-type All`. ## Building documentation diff --git a/Dockerfile.ci b/Dockerfile.ci index 766305fb0e6f9..6a7f56ae0e3bc 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -840,7 +840,7 @@ chmod 1777 /tmp AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd) -PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.9} +PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.10} export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}} diff --git a/INSTALL b/INSTALL index b7a6a51602623..59e596ca79350 100644 --- a/INSTALL +++ b/INSTALL @@ -229,15 +229,15 @@ to avoid "works-for-me" syndrome, where you use different versions of dependencies that are used in main CI tests and by other contributors. There are different constraint files for different Python versions. For example, this command will install -all basic devel requirements and requirements of Google provider as last successfully tested for Python 3.9: +all basic devel requirements and requirements of Google provider as last successfully tested for Python 3.10: uv pip install -e ".[devel,google]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.10.txt" Using the 'constraints-no-providers' constraint files, you can upgrade Airflow without paying attention to the provider's dependencies.
This allows you to keep installed provider dependencies and install the latest supported ones using pure Airflow core. uv pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.10.txt" Note that you can also use `pip install` if you do not use `uv`. diff --git a/README.md b/README.md index 463b00efcabe2..84dc6c4f762dd 100644 --- a/README.md +++ b/README.md @@ -178,14 +178,14 @@ them to the appropriate format and workflow that your tool requires. ```bash pip install 'apache-airflow==3.0.2' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.2/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.2/constraints-3.10.txt" ``` 2. Installing with extras (i.e., postgres, google) ```bash pip install 'apache-airflow[postgres,google]==3.0.2' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.2/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.2/constraints-3.10.txt" ``` For information on installing provider distributions, check @@ -323,7 +323,7 @@ They are based on the official release schedule of Python and Kubernetes, nicely 1. We drop support for Python and Kubernetes versions when they reach EOL. Except for Kubernetes, a version stays supported by Airflow if two major cloud providers still provide support for it. We drop support for those EOL versions in main right after EOL date, and it is effectively removed when we release - the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.9 it + the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.10 it means that we will drop support in main right after October 2026, and the first MAJOR or MINOR version of Airflow released after will not have it. diff --git a/airflow-core/docs/installation/installing-from-pypi.rst b/airflow-core/docs/installation/installing-from-pypi.rst index b51b897b1344b..63bcee23e3244 100644 --- a/airflow-core/docs/installation/installing-from-pypi.rst +++ b/airflow-core/docs/installation/installing-from-pypi.rst @@ -105,7 +105,7 @@ You can create the URL to the file substituting the variables in the template be where: - ``AIRFLOW_VERSION`` - Airflow version (e.g. :subst-code:`|version|`) or ``main``, ``2-0``, for latest development version -- ``PYTHON_VERSION`` Python version e.g. ``3.9``, ``3.10`` +- ``PYTHON_VERSION`` Python version e.g. ``3.10``, ``3.11`` The examples below assume that you want to install Airflow in a reproducible way with the ``celery`` extra, but you can pick your own set of extras and providers to install.
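For illustration, here is the same URL template expanded for the ``celery`` extra mentioned above - a sketch only, with placeholder versions taken from the README examples earlier in this diff rather than pinned recommendations:

.. code-block:: bash

    # Placeholder versions - substitute the Airflow and Python versions you actually use
    AIRFLOW_VERSION=3.0.2
    PYTHON_VERSION=3.10
    pip install "apache-airflow[celery]==${AIRFLOW_VERSION}" \
      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-${PYTHON_VERSION}.txt"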
@@ -321,9 +321,9 @@ dependencies compatible with just Airflow core at the moment Airflow was release AIRFLOW_VERSION=|version| PYTHON_VERSION="$(python -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')" - # For example: 3.9 + # For example: 3.10 CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-no-providers-${PYTHON_VERSION}.txt" - # For example: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-no-providers-3.9.txt + # For example: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-no-providers-3.10.txt pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" Troubleshooting @@ -347,7 +347,7 @@ Symbol not found: ``_Py_GetArgcArgv`` ===================================== If you see ``Symbol not found: _Py_GetArgcArgv`` while starting or importing ``airflow``, this may mean that you are using an incompatible version of Python. -For a homebrew installed version of Python, this is generally caused by using Python in ``/usr/local/opt/bin`` rather than the Frameworks installation (e.g. for ``python 3.9``: ``/usr/local/opt/python@3.9/Frameworks/Python.framework/Versions/3.9``). +For a homebrew installed version of Python, this is generally caused by using Python in ``/usr/local/opt/bin`` rather than the Frameworks installation (e.g. for ``python 3.10``: ``/usr/local/opt/python@3.10/Frameworks/Python.framework/Versions/3.10``). The crux of the issue is that a library Airflow depends on, ``setproctitle``, uses a non-public Python API which is not available from the standard installation ``/usr/local/opt/`` (which symlinks to a path under ``/usr/local/Cellar``). @@ -356,9 +356,9 @@ An easy fix is just to ensure you use a version of Python that has a dylib of th .. code-block:: bash - # Note: these instructions are for python3.9 but can be loosely modified for other versions - brew install python@3.9 - virtualenv -p /usr/local/opt/python@3.9/Frameworks/Python.framework/Versions/3.9/bin/python3 .toy-venv + # Note: these instructions are for python3.10 but can be loosely modified for other versions + brew install python@3.10 + virtualenv -p /usr/local/opt/python@3.10/Frameworks/Python.framework/Versions/3.10/bin/python3 .toy-venv source .toy-venv/bin/activate pip install apache-airflow python diff --git a/airflow-core/docs/installation/prerequisites.rst b/airflow-core/docs/installation/prerequisites.rst index 323af74f8e515..6859918d9c65c 100644 --- a/airflow-core/docs/installation/prerequisites.rst +++ b/airflow-core/docs/installation/prerequisites.rst @@ -20,7 +20,7 @@ Prerequisites Airflow® is tested with: -* Python: 3.9, 3.10, 3.11, 3.12 +* Python: 3.10, 3.11, 3.12 * Databases: diff --git a/airflow-core/docs/installation/upgrading_to_airflow3.rst b/airflow-core/docs/installation/upgrading_to_airflow3.rst index f093f7430339c..32087d4cc84eb 100644 --- a/airflow-core/docs/installation/upgrading_to_airflow3.rst +++ b/airflow-core/docs/installation/upgrading_to_airflow3.rst @@ -24,7 +24,7 @@ Step 1: Take care of prerequisites ---------------------------------- - Make sure that you are on Airflow 2.7 or later. It is recommended to upgrade to latest 2.x and then to Airflow 3. -- Make sure that your Python version is in the supported list. Airflow 3.0.0 supports the following Python versions: Python 3.9, 3.10, 3.11 and 3.12. +- Make sure that your Python version is in the supported list. 
Airflow 3 supports the following Python versions: Python 3.10, 3.11 and 3.12. - Ensure that you are not using any features or functionality that have been :ref:`removed in Airflow 3`. diff --git a/airflow-core/docs/start.rst b/airflow-core/docs/start.rst index 07af3ba73028f..ccc0f7738bfca 100644 --- a/airflow-core/docs/start.rst +++ b/airflow-core/docs/start.rst @@ -24,7 +24,7 @@ This quick start guide will help you bootstrap an Airflow standalone instance on .. note:: - Successful installation requires a Python 3 environment. Starting with Airflow 2.7.0, Airflow supports Python 3.9, 3.10, 3.11, and 3.12. + Successful installation requires a Python 3 environment. Airflow currently supports Python 3.10, 3.11, and 3.12. The officially supported installation method is with ``pip``. diff --git a/airflow-core/pyproject.toml b/airflow-core/pyproject.toml index 36826c11661a2..d5d03e4021c02 100644 --- a/airflow-core/pyproject.toml +++ b/airflow-core/pyproject.toml @@ -35,7 +35,7 @@ name = "apache-airflow-core" description = "Core packages for Apache Airflow, schedule and API server" readme = { file = "README.md", content-type = "text/markdown" } license-files.globs = ["LICENSE", "3rd-party-licenses/*.txt", "NOTICE"] -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] @@ -51,7 +51,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/connections.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/connections.py index fee330e1fd1d6..2a6bd6bec8af2 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/connections.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/connections.py @@ -47,7 +47,7 @@ class ConnectionResponse(BaseModel): def redact_password(cls, v: str | None, field_info: ValidationInfo) -> str | None: if v is None: return None - return redact(v, field_info.field_name) + return str(redact(v, field_info.field_name)) @field_validator("extra", mode="before") @classmethod diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/variables.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/variables.py index 2905e752650cd..d60f052f77705 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/variables.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/variables.py @@ -46,7 +46,7 @@ def redact_val(self) -> Self: return self except json.JSONDecodeError: # value is not a serialized string representation of a dict.
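            # Presumably (the redact() signature is not shown in this diff) redact()
            # walks nested dicts/lists as well as plain strings, so it is not typed as
            # returning `str`; the str() wrapper added below - like the identical
            # change in connections.py above - keeps the string-typed `val` field honest.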
- self.val = redact(self.val, self.key) + self.val = str(redact(self.val, self.key)) return self diff --git a/airflow-ctl/README.md b/airflow-ctl/README.md index 8e543717d9bdc..28f1b47381224 100644 --- a/airflow-ctl/README.md +++ b/airflow-ctl/README.md @@ -31,7 +31,7 @@ A command-line tool for interacting with Apache Airflow instances through the Ai ## Requirements -- Python 3.9 or later (compatible with Python >= 3.9 and < 3.13) +- Python 3.10 or later (compatible with Python >= 3.10 and < 3.13) - Network access to an Apache Airflow instance with REST API enabled - Keyring backend installed in operating system for secure token storage diff --git a/airflow-ctl/docs/installation/installing-from-pypi.rst b/airflow-ctl/docs/installation/installing-from-pypi.rst index f498c163298a7..a02a6a8991733 100644 --- a/airflow-ctl/docs/installation/installing-from-pypi.rst +++ b/airflow-ctl/docs/installation/installing-from-pypi.rst @@ -97,7 +97,7 @@ You can create the URL to the file substituting the variables in the template be where: - ``AIRFLOW_CTL_VERSION`` - Airflow CTL version (e.g. :subst-code:`|version|`) or ``main``, ``2-0``, for latest development version -- ``PYTHON_VERSION`` Python version e.g. ``3.9``, ``3.10`` +- ``PYTHON_VERSION`` Python version e.g. ``3.10``, ``3.11`` Verifying installed dependencies diff --git a/airflow-ctl/pyproject.toml b/airflow-ctl/pyproject.toml index 6d2d96f0fa478..a8590364f6f40 100644 --- a/airflow-ctl/pyproject.toml +++ b/airflow-ctl/pyproject.toml @@ -20,7 +20,7 @@ name = "apache-airflow-ctl" dynamic = ["version"] description = "Apache Airflow command line tool for communicating with an Apache Airflow, using the API." readme = { file = "README.md", content-type = "text/markdown" } -requires-python = ">=3.9, <3.13" +requires-python = ">=3.10, <3.13" dependencies = [ # TODO there could be still missing deps such as airflow-core "argcomplete>=1.10", @@ -33,7 +33,6 @@ dependencies = [ "rich-argparse>=1.0.0", "structlog>=25.2.0", "uuid6>=2024.7.10", - "eval-type-backport>=0.2.0; python_version == '3.9'", ] classifiers = [ @@ -130,14 +129,14 @@ enum-field-as-literal='one' # When a single enum member, make it output a `Liter input-file-type='openapi' output-model-type='pydantic_v2.BaseModel' output-datetime-class='datetime' -target-python-version='3.9' +target-python-version='3.10' use-annotated=true use-default=true use-double-quotes=true use-schema-description=true # Desc becomes class doc comment use-standard-collections=true # list[] not List[] use-subclass-enum=true # enum, not union of Literals -use-union-operator=true # 3.9+annotations, not `Union[]` +use-union-operator=true # 3.10+ `X | Y` syntax, not `Union[]` input = "../airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml" output = "src/airflowctl/api/datamodels/generated.py" diff --git a/airflow-ctl/src/airflowctl/api/client.py b/airflow-ctl/src/airflowctl/api/client.py index 162e21c9422dc..4f707b18c5f95 100644 --- a/airflow-ctl/src/airflowctl/api/client.py +++ b/airflow-ctl/src/airflowctl/api/client.py @@ -22,8 +22,9 @@ import json import os import sys +from collections.abc import Callable from functools import wraps -from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar, cast +from typing import TYPE_CHECKING, Any, Literal, ParamSpec, TypeVar, cast import httpx import keyring @@ -53,7 +54,6 @@ AirflowCtlException, AirflowCtlNotFoundException, ) -from airflowctl.typing_compat import ParamSpec if TYPE_CHECKING: # # methodtools doesn't have typestubs, so give a stub diff
--git a/airflow-ctl/src/airflowctl/ctl/cli_config.py b/airflow-ctl/src/airflowctl/ctl/cli_config.py index 7da28d8b77481..71bff72125b6a 100644 --- a/airflow-ctl/src/airflowctl/ctl/cli_config.py +++ b/airflow-ctl/src/airflowctl/ctl/cli_config.py @@ -27,10 +27,10 @@ import os import textwrap from argparse import Namespace -from collections.abc import Iterable +from collections.abc import Callable, Iterable from functools import partial from pathlib import Path -from typing import Any, Callable, NamedTuple, Union +from typing import Any, NamedTuple import rich @@ -330,7 +330,7 @@ def from_group_command(cls, group_command: GroupCommand) -> GroupCommandParser: ) -CLICommand = Union[ActionCommand, GroupCommand, GroupCommandParser] +CLICommand = ActionCommand | GroupCommand | GroupCommandParser class CommandFactory: diff --git a/airflow-ctl/src/airflowctl/typing_compat.py b/airflow-ctl/src/airflowctl/typing_compat.py deleted file mode 100644 index 625e9e37ca667..0000000000000 --- a/airflow-ctl/src/airflowctl/typing_compat.py +++ /dev/null @@ -1,29 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-"""This module provides helper code to make type annotation within airflowctl codebase easier.""" - -from __future__ import annotations - -__all__ = ["ParamSpec"] - -import sys - -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec diff --git a/airflow-ctl/src/airflowctl/utils/helpers.py b/airflow-ctl/src/airflowctl/utils/helpers.py index 4d1c6a7d7a0f5..d73defb3c5f97 100644 --- a/airflow-ctl/src/airflowctl/utils/helpers.py +++ b/airflow-ctl/src/airflowctl/utils/helpers.py @@ -18,8 +18,8 @@ from __future__ import annotations import itertools -from collections.abc import Iterable -from typing import Callable, TypeVar +from collections.abc import Callable, Iterable +from typing import TypeVar T = TypeVar("T") diff --git a/chart/pyproject.toml b/chart/pyproject.toml index 2c559d1997dc8..5daf4f3a7699b 100644 --- a/chart/pyproject.toml +++ b/chart/pyproject.toml @@ -40,7 +40,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", diff --git a/clients/python/pyproject.toml b/clients/python/pyproject.toml index 2cfdeeaad749a..b7f01bb004de6 100644 --- a/clients/python/pyproject.toml +++ b/clients/python/pyproject.toml @@ -42,7 +42,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -73,7 +72,7 @@ run-coverage = "pytest test" run = "run-coverage --no-cov" [[tool.hatch.envs.test.matrix]] -python = ["3.9", "3.10", "3.11"] +python = ["3.10", "3.11"] [tool.hatch.version] path = "./version.txt" diff --git a/clients/python/test_python_client.py b/clients/python/test_python_client.py index bf04d68500ea6..4000686f88229 100644 --- a/clients/python/test_python_client.py +++ b/clients/python/test_python_client.py @@ -17,7 +17,7 @@ # # PEP 723 compliant inline script metadata (not yet widely supported) # /// script -# requires-python = ">=3.9" +# requires-python = ">=3.10" # dependencies = [ # "apache-airflow-client", # "rich", diff --git a/constraints/README.md b/constraints/README.md index 97c03dab0c400..758048a900690 100644 --- a/constraints/README.md +++ b/constraints/README.md @@ -29,12 +29,12 @@ This allows you to iterate on dependencies without having to run `--upgrade-to-n Typical workflow in this case is: * download and copy the constraint file to the folder (for example via -[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt)) +[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.10.txt)) * modify the constraint file in "constraints" folder * build the image using this command ```bash -breeze ci-image build --python 3.9 --airflow-constraints-location constraints/constraints-3.9.txt +breeze ci-image build --python 3.10 --airflow-constraints-location constraints/constraints-3.10.txt ``` You can continue iterating and updating the constraint file (and rebuilding the image) @@ -46,7 +46,7 @@ pip freeze | sort | \ grep -v "apache_airflow" | \ grep -v "apache-airflow==" | \ grep -v "@" | \ - grep -v 
"/opt/airflow" > /opt/airflow/constraints/constraints-3.9.txt + grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.10.txt ``` If you are working with others on updating the dependencies, you can also commit the constraint diff --git a/contributing-docs/03_contributors_quick_start.rst b/contributing-docs/03_contributors_quick_start.rst index c10d4d80ab63c..ccc9a3ecac8b1 100644 --- a/contributing-docs/03_contributors_quick_start.rst +++ b/contributing-docs/03_contributors_quick_start.rst @@ -255,8 +255,8 @@ To avoid burden on our CI infrastructure and to save time, Pre-commit hooks can We have recently started to recommend ``uv`` for our local development. .. note:: - Remember to have global python set to Python >= 3.9 - Python 3.8 is end-of-life already and we've - started to use Python 3.9+ features in Airflow and accompanying scripts. + Remember to have global python set to Python >= 3.10 - Python 3.10 is end-of-life already and we've + started to use Python 3.10+ features in Airflow and accompanying scripts. Installing pre-commit is best done with ``uv`` (recommended) or ``pipx``. @@ -433,7 +433,7 @@ see in CI in your local environment. .. code-block:: bash - breeze --python 3.9 --backend postgres + breeze --python 3.10 --backend postgres .. note:: If you encounter an error like "docker.credentials.errors.InitializationError: @@ -490,7 +490,7 @@ Using Breeze ------------ 1. Starting the Breeze environment using ``breeze start-airflow`` starts the Breeze environment with last configuration run( - In this case Python version and backend are picked up from last execution ``breeze --python 3.9 --backend postgres``) + In this case Python version and backend are picked up from last execution ``breeze --python 3.10 --backend postgres``) It also automatically starts the API server (FastAPI api and UI), triggerer, dag processor and scheduler. It drops you in tmux with triggerer to the right, and Scheduler, API server (FastAPI api and UI), DAG processor from left to right at the bottom. Use ``[Ctrl + B] and Arrow keys`` to navigate. @@ -501,9 +501,9 @@ Using Breeze Use CI image. Branch name: main - Docker image: ghcr.io/apache/airflow/main/ci/python3.9:latest + Docker image: ghcr.io/apache/airflow/main/ci/python3.10:latest Airflow source version: 2.4.0.dev0 - Python version: 3.9 + Python version: 3.10 Backend: mysql 5.7 * Port forwarding: @@ -540,7 +540,7 @@ Using Breeze .. code-block:: bash - breeze --python 3.9 --backend postgres + breeze --python 3.10 --backend postgres 2. Open tmux @@ -614,7 +614,7 @@ If ``breeze`` was started with ``breeze start-airflow``, this command will stop root@f3619b74c59a:/opt/airflow# stop_airflow breeze down -If ``breeze`` was started with ``breeze --python 3.9 --backend postgres`` (or similar): +If ``breeze`` was started with ``breeze --python 3.10 --backend postgres`` (or similar): .. code-block:: bash @@ -674,7 +674,7 @@ All Tests are inside ./tests directory. root@63528318c8b1:/opt/airflow# pytest tests/utils/test_dates.py ============================================================= test session starts ============================================================== - platform linux -- Python 3.9.20, pytest-8.3.3, pluggy-1.5.0 -- /usr/local/bin/python + platform linux -- Python 3.10.20, pytest-8.3.3, pluggy-1.5.0 -- /usr/local/bin/python cachedir: .pytest_cache rootdir: /opt/airflow configfile: pyproject.toml @@ -694,20 +694,20 @@ All Tests are inside ./tests directory. .. 
code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type All + breeze --backend postgres --postgres-version 15 --python 3.10 --db-reset testing tests --test-type All - Running specific type of test .. code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type Core + breeze --backend postgres --postgres-version 15 --python 3.10 --db-reset testing tests --test-type Core - Running Integration test for specific test type .. code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type All --integration mongo + breeze --backend postgres --postgres-version 15 --python 3.10 --db-reset testing tests --test-type All --integration mongo - For more information on Testing visit |09_testing.rst| diff --git a/contributing-docs/05_pull_requests.rst b/contributing-docs/05_pull_requests.rst index d6e360721d275..5a7a4d96e13b4 100644 --- a/contributing-docs/05_pull_requests.rst +++ b/contributing-docs/05_pull_requests.rst @@ -92,7 +92,7 @@ these guidelines: you can push your code to PR and see results of the tests in the CI. - You can use any supported python version to run the tests, but the best is to check - if it works for the oldest supported version (Python 3.9 currently). In rare cases + if it works for the oldest supported version (Python 3.10 currently). In rare cases tests might fail with the oldest version when you use features that are available in newer Python versions. For that purpose we have ``airflow.compat`` package where we keep back-ported useful features from newer versions. diff --git a/contributing-docs/07_local_virtualenv.rst b/contributing-docs/07_local_virtualenv.rst index 501e23ac9556d..adb97fe99fa9c 100644 --- a/contributing-docs/07_local_virtualenv.rst +++ b/contributing-docs/07_local_virtualenv.rst @@ -34,7 +34,7 @@ Required Software Packages Use system-level package managers like yum, apt-get for Linux, or Homebrew for macOS to install required software packages: -* Python (One of: 3.9, 3.10, 3.11, 3.12) +* Python (One of: 3.10, 3.11, 3.12) * MySQL 5.7+ * libxml * helm (only for helm chart tests) @@ -100,11 +100,11 @@ Installing Python versions This step can be skipped - ``uv`` will automatically install the Python version you need when you create a virtualenv. -You can install Python versions using ``uv python install`` command. For example, to install Python 3.9.7, you can run: +You can install Python versions using ``uv python install`` command. For example, to install Python 3.10.7, you can run: .. code:: bash - uv python install 3.9.7 + uv python install 3.10.7 This is an optional step - ``uv`` will automatically install the Python version you need when you create a virtualenv. @@ -124,7 +124,7 @@ with a specific Python version by running: .. code:: bash - uv venv --python 3.9.7 + uv venv --python 3.10.7 You can also create a venv with a different venv directory name by running: @@ -275,12 +275,12 @@ to avoid "works-for-me" syndrome, where you use different versions of dependencies that are used in main, CI tests and by other contributors. There are different constraint files for different python versions. For example this command will install -all basic devel requirements and requirements of google provider as last successfully tested for Python 3.9: +all basic devel requirements and requirements of google provider as last successfully tested for Python 3.10: ..
code:: bash uv pip install -e ".[devel,google]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.10.txt" In the future we will utilise ``uv.lock`` to manage dependencies and constraints, but for the moment we do not @@ -305,7 +305,7 @@ and install to latest supported ones by pure Airflow core. .. code:: bash uv pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.10.txt" These are examples of the development options available with the local virtualenv in your IDE: diff --git a/contributing-docs/08_static_code_checks.rst b/contributing-docs/08_static_code_checks.rst index 70fd44db68d8f..f342e3ab7843a 100644 --- a/contributing-docs/08_static_code_checks.rst +++ b/contributing-docs/08_static_code_checks.rst @@ -40,7 +40,7 @@ use. So, you can be sure your modifications will also work for CI if they pass pre-commit hooks. We have integrated the fantastic `pre-commit `__ framework in our development workflow. To install and use it, you need at least Python 3.10 locally. Installing pre-commit hooks --------------------------- diff --git a/contributing-docs/testing/docker_compose_tests.rst b/contributing-docs/testing/docker_compose_tests.rst index 603c4cccffc3d..0a8bc1e5a1f80 100644 --- a/contributing-docs/testing/docker_compose_tests.rst +++ b/contributing-docs/testing/docker_compose_tests.rst @@ -48,7 +48,7 @@ Running complete test with breeze: .. code-block:: bash - breeze prod-image build --python 3.9 + breeze prod-image build --python 3.10 breeze testing docker-compose-tests In case the test fails, it will dump the logs from the running containers to the console and it @@ -65,8 +65,8 @@ to see the output of the test as it happens (it can be also set via The test can be also run manually with ``pytest docker_tests/test_docker_compose_quick_start.py`` command, provided that you have a local Airflow venv with ``dev`` extra set and the ``DOCKER_IMAGE`` environment variable is set to the image you want to test. The variable defaults -to ``ghcr.io/apache/airflow/main/prod/python3.9:latest`` which is built by default -when you run ``breeze prod-image build --python 3.9``. also the switches ``--skip-docker-compose-deletion`` +to ``ghcr.io/apache/airflow/main/prod/python3.10:latest`` which is built by default +when you run ``breeze prod-image build --python 3.10``. Also, the switches ``--skip-docker-compose-deletion`` and ``--wait-for-containers-timeout`` can only be passed via environment variables. If you want to debug the deployment using ``docker compose`` commands after ``SKIP_DOCKER_COMPOSE_DELETION`` @@ -87,7 +87,7 @@ the prod image build command above. ..
code-block:: bash - export AIRFLOW_IMAGE_NAME=ghcr.io/apache/airflow/main/prod/python3.9:latest + export AIRFLOW_IMAGE_NAME=ghcr.io/apache/airflow/main/prod/python3.10:latest and follow the instructions in the `Running Airflow in Docker `_ diff --git a/contributing-docs/testing/k8s_tests.rst b/contributing-docs/testing/k8s_tests.rst index 49fb8e83bd44d..3910532cf494c 100644 --- a/contributing-docs/testing/k8s_tests.rst +++ b/contributing-docs/testing/k8s_tests.rst @@ -47,7 +47,7 @@ per each combination of Python and Kubernetes version. This is used during CI wh tests against those different clusters - even in parallel. The cluster name follows the pattern ``airflow-python-X.Y-vA.B.C`` where X.Y is a major/minor Python version -and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.9-v1.24.0`` +and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.10-v1.24.0`` Most of the commands can be executed in parallel for multiple images/clusters by adding ``--run-in-parallel`` to create clusters or deploy airflow. Similarly checking for status, dumping logs and deleting clusters @@ -234,7 +234,7 @@ Should result in KinD creating the K8S cluster. .. code-block:: text - Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kindconfig.yaml: + Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.10-v1.24.2/.kindconfig.yaml: kind: Cluster apiVersion: kind.x-k8s.io/v1alpha4 @@ -251,7 +251,7 @@ Should result in KinD creating the K8S cluster. listenAddress: "127.0.0.1" protocol: TCP - Creating cluster "airflow-python-3.9-v1.24.2" ... + Creating cluster "airflow-python-3.10-v1.24.2" ... ✓ Ensuring node image (kindest/node:v1.24.2) 🖼 ✓ Preparing nodes 📦 📦 ✓ Writing configuration 📜 @@ -259,10 +259,10 @@ Should result in KinD creating the K8S cluster. ✓ Installing CNI 🔌 ✓ Installing StorageClass 💾 ✓ Joining worker nodes 🚜 - Set kubectl context to "kind-airflow-python-3.9-v1.24.2" + Set kubectl context to "kind-airflow-python-3.10-v1.24.2" You can now use your cluster with: - kubectl cluster-info --context kind-airflow-python-3.9-v1.24.2 + kubectl cluster-info --context kind-airflow-python-3.10-v1.24.2 Not sure what to do next? 😅 Check out https://kind.sigs.k8s.io/docs/user/quick-start/ @@ -270,9 +270,9 @@ Should result in KinD creating the K8S cluster. Connecting to localhost:18150. Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.9 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.10 --kubernetes-version v1.24.2` to (re)deploy airflow - KinD cluster airflow-python-3.9-v1.24.2 created! + KinD cluster airflow-python-3.10-v1.24.2 created! NEXT STEP: You might now configure your cluster by: @@ -287,20 +287,20 @@ Should result in KinD creating the K8S cluster. .. 
code-block:: text - Configuring airflow-python-3.9-v1.24.2 to be ready for Airflow deployment - Deleting K8S namespaces for kind-airflow-python-3.9-v1.24.2 + Configuring airflow-python-3.10-v1.24.2 to be ready for Airflow deployment + Deleting K8S namespaces for kind-airflow-python-3.10-v1.24.2 Error from server (NotFound): namespaces "airflow" not found Error from server (NotFound): namespaces "test-namespace" not found Creating namespaces namespace/airflow created namespace/test-namespace created - Created K8S namespaces for cluster kind-airflow-python-3.9-v1.24.2 + Created K8S namespaces for cluster kind-airflow-python-3.10-v1.24.2 - Deploying test resources for cluster kind-airflow-python-3.9-v1.24.2 + Deploying test resources for cluster kind-airflow-python-3.10-v1.24.2 persistentvolume/test-volume created persistentvolumeclaim/test-volume created service/airflow-webserver-node-port created - Deployed test resources for cluster kind-airflow-python-3.9-v1.24.2 + Deployed test resources for cluster kind-airflow-python-3.10-v1.24.2 NEXT STEP: You might now build your k8s image by: @@ -318,45 +318,45 @@ Should show the status of current KinD cluster. .. code-block:: text ======================================================================================================================== - Cluster: airflow-python-3.9-v1.24.2 + Cluster: airflow-python-3.10-v1.24.2 - * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kubeconfig - * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kindconfig.yaml + * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.10-v1.24.2/.kubeconfig + * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.10-v1.24.2/.kindconfig.yaml - Cluster info: airflow-python-3.9-v1.24.2 + Cluster info: airflow-python-3.10-v1.24.2 Kubernetes control plane is running at https://127.0.0.1:48366 CoreDNS is running at https://127.0.0.1:48366/api/v1/namespaces/kube-system/services/kube-dns:dns/proxy To further debug and diagnose cluster problems, use 'kubectl cluster-info dump'. - Storage class for airflow-python-3.9-v1.24.2 + Storage class for airflow-python-3.10-v1.24.2 NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE standard (default) rancher.io/local-path Delete WaitForFirstConsumer false 83s - Running pods for airflow-python-3.9-v1.24.2 + Running pods for airflow-python-3.10-v1.24.2 NAME READY STATUS RESTARTS AGE coredns-6d4b75cb6d-rwp9d 1/1 Running 0 71s coredns-6d4b75cb6d-vqnrc 1/1 Running 0 71s - etcd-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s + etcd-airflow-python-3.10-v1.24.2-control-plane 1/1 Running 0 84s kindnet-ckc8l 1/1 Running 0 69s kindnet-qqt8k 1/1 Running 0 71s - kube-apiserver-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s - kube-controller-manager-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s + kube-apiserver-airflow-python-3.10-v1.24.2-control-plane 1/1 Running 0 84s + kube-controller-manager-airflow-python-3.10-v1.24.2-control-plane 1/1 Running 0 84s kube-proxy-6g7hn 1/1 Running 0 69s kube-proxy-dwfvp 1/1 Running 0 71s - kube-scheduler-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s + kube-scheduler-airflow-python-3.10-v1.24.2-control-plane 1/1 Running 0 84s KinD Cluster API server URL: http://localhost:48366 Connecting to localhost:18150. 
Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.9 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.10 --kubernetes-version v1.24.2` to (re)deploy airflow - Cluster healthy: airflow-python-3.9-v1.24.2 + Cluster healthy: airflow-python-3.10-v1.24.2 5. Build the image base on PROD Airflow image. You need to build the PROD image first (the command will guide you if you did not) either by running the build separately or passing ``--rebuild-base-image`` @@ -374,15 +374,15 @@ Should show the status of current KinD cluster. .. code-block:: text - Building the K8S image for Python 3.9 using Airflow base image: ghcr.io/apache/airflow/main/prod/python3.9:latest + Building the K8S image for Python 3.10 using Airflow base image: ghcr.io/apache/airflow/main/prod/python3.10:latest [+] Building 0.1s (8/8) FINISHED => [internal] load build definition from Dockerfile 0.0s => => transferring dockerfile: 301B 0.0s => [internal] load .dockerignore 0.0s => => transferring context: 35B 0.0s - => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.9:latest 0.0s - => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.9:latest 0.0s + => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.10:latest 0.0s + => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.10:latest 0.0s => [internal] load build context 0.0s => => transferring context: 3.00kB 0.0s => CACHED [2/3] COPY airflow/example_dags/ /opt/airflow/dags/ 0.0s @@ -390,7 +390,7 @@ Should show the status of current KinD cluster. => exporting to image 0.0s => => exporting layers 0.0s => => writing image sha256:c0bdd363c549c3b0731b8e8ce34153d081f239ee2b582355b7b3ffd5394c40bb 0.0s - => => naming to ghcr.io/apache/airflow/main/prod/python3.9-kubernetes:latest + => => naming to ghcr.io/apache/airflow/main/prod/python3.10-kubernetes:latest NEXT STEP: You might now upload your k8s image by: @@ -410,9 +410,9 @@ Should show the status of current KinD cluster. Good version of kubectl installed: 1.25.0 in /Users/jarek/IdeaProjects/airflow/kubernetes-tests/.venv/bin Good version of helm installed: 3.9.2 in /Users/jarek/IdeaProjects/airflow/kubernetes-tests/.venv/bin Stable repo is already added - Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.9-kubernetes to cluster airflow-python-3.9-v1.24.2 - Image: "ghcr.io/apache/airflow/main/prod/python3.9-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.9-v1.24.2-worker", loading... - Image: "ghcr.io/apache/airflow/main/prod/python3.9-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.9-v1.24.2-control-plane", loading... + Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.10-kubernetes to cluster airflow-python-3.10-v1.24.2 + Image: "ghcr.io/apache/airflow/main/prod/python3.10-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.10-v1.24.2-worker", loading... 
+ Image: "ghcr.io/apache/airflow/main/prod/python3.10-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.10-v1.24.2-control-plane", loading... NEXT STEP: You might now deploy Airflow by: @@ -427,8 +427,8 @@ Should show the status of current KinD cluster. .. code-block:: text - Deploying Airflow for cluster airflow-python-3.9-v1.24.2 - Deploying kind-airflow-python-3.9-v1.24.2 with Airflow Helm Chart. + Deploying Airflow for cluster airflow-python-3.10-v1.24.2 + Deploying kind-airflow-python-3.10-v1.24.2 with Airflow Helm Chart. Copied chart sources to /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart Deploying Airflow from /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart NAME: airflow @@ -470,12 +470,12 @@ Should show the status of current KinD cluster. Information on how to set a static webserver secret key can be found here: https://airflow.apache.org/docs/helm-chart/stable/production-guide.html#api-secret-key - Deployed kind-airflow-python-3.9-v1.24.2 with Airflow Helm Chart. + Deployed kind-airflow-python-3.10-v1.24.2 with Airflow Helm Chart. - Airflow for Python 3.9 and K8S version v1.24.2 has been successfully deployed. + Airflow for Python 3.10 and K8S version v1.24.2 has been successfully deployed. - The KinD cluster name: airflow-python-3.9-v1.24.2 - The kubectl cluster name: kind-airflow-python-3.9-v1.24.2. + The KinD cluster name: airflow-python-3.10-v1.24.2 + The kubectl cluster name: kind-airflow-python-3.10-v1.24.2. KinD Cluster API server URL: http://localhost:48366 @@ -519,7 +519,7 @@ The virtualenv required will be created automatically when the scripts are run. Good version of helm installed: 3.16.4 in /Users/jarek/IdeaProjects/airflow/kubernetes-tests/.venv/bin Stable repo is already added - Running tests with kind-airflow-python-3.9-v1.29.12 cluster. + Running tests with kind-airflow-python-3.10-v1.29.12 cluster. Command to run: uv run pytest kubernetes-tests/tests/ Installed 74 packages in 179ms /Users/jarek/IdeaProjects/airflow/.venv/lib/python3.12/site-packages/pytest_asyncio/plugin.py:208: PytestDeprecationWarning: The configuration option "asyncio_default_fixture_loop_scope" is unset. @@ -553,7 +553,7 @@ Once you enter the environment, you receive this information: Entering interactive k8s shell. - (kind-airflow-python-3.9-v1.24.2:KubernetesExecutor)> + (kind-airflow-python-3.10-v1.24.2:KubernetesExecutor)> In a separate terminal you can open the k9s CLI: @@ -603,7 +603,7 @@ environment variable copying it from the result of "breeze k8s tests": echo ${KUBECONFIG} - /home/jarek/code/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.28.13/.kube/config + /home/jarek/code/airflow/.build/.k8s-clusters/airflow-python-3.10-v1.28.13/.kube/config .. image:: images/kubeconfig-env.png :align: center @@ -671,9 +671,9 @@ Kind has also useful commands to inspect your running cluster: .. code-block:: text - Deleting KinD cluster airflow-python-3.9-v1.24.2! - Deleting cluster "airflow-python-3.9-v1.24.2" ... - KinD cluster airflow-python-3.9-v1.24.2 deleted! + Deleting KinD cluster airflow-python-3.10-v1.24.2! + Deleting cluster "airflow-python-3.10-v1.24.2" ... + KinD cluster airflow-python-3.10-v1.24.2 deleted! 
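For reference, a sketch of stock ``kind``/``kubectl`` commands that are handy for this kind of manual inspection (cluster names follow the ``airflow-python-X.Y-vA.B.C`` pattern described earlier; the version numbers below are just examples):

.. code-block:: bash

    # list the KinD clusters present on this machine
    kind get clusters
    # inspect the control plane of one of the Airflow test clusters
    kubectl cluster-info --context kind-airflow-python-3.10-v1.24.2
    # dump node and pod logs for offline debugging
    kind export logs --name airflow-python-3.10-v1.24.2 /tmp/kind-logs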
Running complete k8s tests diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst index 698096f702990..c15776a963214 100644 --- a/contributing-docs/testing/unit_tests.rst +++ b/contributing-docs/testing/unit_tests.rst @@ -185,7 +185,7 @@ rerun in Breeze as you will (``-n auto`` will parallelize tests using ``pytest-x .. code-block:: bash - breeze shell --backend none --python 3.9 + breeze shell --backend none --python 3.10 > pytest airflow-core/tests --skip-db-tests -n auto @@ -227,7 +227,7 @@ You can also run DB tests with ``breeze`` dockerized environment. You can choose ``--backend`` flag. The default is ``sqlite`` but you can also use others such as ``postgres`` or ``mysql``. You can also select backend version and Python version to use. You can specify the ``test-type`` to run - breeze will list the test types you can run with ``--help`` and provide auto-complete for them. Example -below runs the ``Core`` tests with ``postgres`` backend and ``3.9`` Python version +below runs the ``Core`` tests with ``postgres`` backend and ``3.10`` Python version You can also run the commands via ``breeze testing core-tests`` or ``breeze testing providers-tests`` - by adding the parallel flags manually: @@ -249,7 +249,7 @@ either by package/module/test or by test type - whatever ``pytest`` supports. .. code-block:: bash - breeze shell --backend postgres --python 3.9 + breeze shell --backend postgres --python 3.10 > pytest airflow-core/tests --run-db-tests-only As explained before, you cannot run DB tests in parallel using ``pytest-xdist`` plugin, but ``breeze`` has and you can run the tests using ``--run-in-parallel`` flag. .. code-block:: bash - breeze testing core-tests --run-db-tests-only --backend postgres --python 3.9 --run-in-parallel + breeze testing core-tests --run-db-tests-only --backend postgres --python 3.10 --run-in-parallel Examples of marking test as DB test ................................... @@ -1168,7 +1168,7 @@ Here is how to reproduce it. .. code-block:: bash - breeze ci-image build --python 3.9 + breeze ci-image build --python 3.10 2. Build providers from latest sources: diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index ba1066d02d12a..0fafbc345a7e9 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -789,7 +789,7 @@ Optionally it can be followed with constraints ```shell script pip install apache-airflow==rc \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-/constraints-3.10.txt" ``` Note that the constraints contain python version that you are installing it with. @@ -801,7 +801,7 @@ There is also an easy way of installation with Breeze if you have the latest sou Running the following command will use tmux inside breeze, create `admin` user and run Webserver & Scheduler: ```shell script -breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.9 --backend postgres +breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.10 --backend postgres ``` You can also choose different executors and extras to install when you are installing airflow this way. 
For @@ -809,7 +809,7 @@ example in order to run Airflow with CeleryExecutor and install celery, google a Airflow 2.7.0, you need to have celery provider installed to run Airflow with CeleryExecutor) you can run: ```shell script -breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.9 --backend postgres \ +breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.10 --backend postgres \ --executor CeleryExecutor --airflow-extras "celery,google,amazon" ``` diff --git a/dev/README_RELEASE_PROVIDERS.md b/dev/README_RELEASE_PROVIDERS.md index f0a45a560af28..089659c91751b 100644 --- a/dev/README_RELEASE_PROVIDERS.md +++ b/dev/README_RELEASE_PROVIDERS.md @@ -1002,7 +1002,7 @@ pip install apache-airflow-providers-==rc ### Installing with Breeze ```shell -breeze start-airflow --use-airflow-version 2.2.4 --python 3.9 --backend postgres \ +breeze start-airflow --use-airflow-version 2.2.4 --python 3.10 --backend postgres \ --load-example-dags --load-default-connections ``` diff --git a/dev/breeze/README.md b/dev/breeze/README.md index d1c689d1791c3..f89c5537f10c9 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -135,6 +135,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. --------------------------------------------------------------------------------------------------------- -Package config hash: 098162a9dca075feeada933f58455aa74f58811a34959f06bd3298aef9773a60f3800e9d5d00b9335d300572842a2838f06bd83f7a79a5ce5f529fb6202059af +Package config hash: aff0246d8f21f59dc1299a6d8298e166ece38be97ee62f0fcb8641e59d991f7e8d649cfcd7d709b20c75d251761e1b3fd18dabe9d2ebc9f45ea58f7379fa125c --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/doc/01_installation.rst b/dev/breeze/doc/01_installation.rst index 9b54ed17d7542..5f0acd5e080f8 100644 --- a/dev/breeze/doc/01_installation.rst +++ b/dev/breeze/doc/01_installation.rst @@ -332,7 +332,7 @@ that Breeze works on .. warning:: Upgrading from earlier Python version - If you used Breeze with Python 3.8 and when running it, it will complain that it needs Python 3.9. In this + If you used Breeze with Python 3.8, when running it, it will complain that it needs Python 3.10. In this case you should force-reinstall Breeze with ``uv`` (or ``pipx``): .. code-block:: bash @@ -373,13 +373,13 @@ that Breeze works on .. code-block:: bash - uv tool install --python 3.9.16 ./dev/breeze --force + uv tool install --python 3.10.16 ./dev/breeze --force or .. code-block:: bash - pipx install -e ./dev/breeze --python /Users/airflow/.pyenv/versions/3.9.16/bin/python --force + pipx install -e ./dev/breeze --python /Users/airflow/.pyenv/versions/3.10.16/bin/python --force Running Breeze for the first time diff --git a/dev/breeze/doc/03_developer_tasks.rst b/dev/breeze/doc/03_developer_tasks.rst index 3a425469283de..d269e98a0c840 100644 --- a/dev/breeze/doc/03_developer_tasks.rst +++ b/dev/breeze/doc/03_developer_tasks.rst @@ -34,12 +34,12 @@ You can use additional ``breeze`` flags to choose your environment. You can spec version to use, and backend (the meta-data database). Thanks to that, with Breeze, you can recreate the same environments as we have in matrix builds in the CI. See next chapter for backend selection. -For example, you can choose to run Python 3.9 tests with MySQL as backend and with mysql version 8 +For example, you can choose to run Python 3.10 tests with MySQL as backend and with mysql version 8 as follows: ..
code-block:: bash - breeze --python 3.9 --backend mysql --mysql-version 8.0 + breeze --python 3.10 --backend mysql --mysql-version 8.0 .. note:: Note for Windows WSL2 users @@ -55,7 +55,7 @@ Try adding ``--builder=default`` to your command. For example: .. code-block:: bash - breeze --builder=default --python 3.9 --backend mysql --mysql-version 8.0 + breeze --builder=default --python 3.10 --backend mysql --mysql-version 8.0 The choices you make are persisted in the ``./.build/`` cache directory so that next time when you use the ``breeze`` script, it could use the values that were used previously. This way you do not have to specify @@ -363,7 +363,7 @@ When you are starting Airflow from local sources, www asset compilation is autom .. code-block:: bash - breeze --python 3.9 --backend mysql start-airflow + breeze --python 3.10 --backend mysql start-airflow You can also use it to start different executor. @@ -376,7 +376,7 @@ You can also use it to start any released version of Airflow from ``PyPI`` with .. code-block:: bash - breeze start-airflow --python 3.9 --backend mysql --use-airflow-version 2.7.0 + breeze start-airflow --python 3.10 --backend mysql --use-airflow-version 2.7.0 When you are installing version from PyPI, it's also possible to specify extras that should be used when installing Airflow - you can provide several extras separated by comma - for example to install diff --git a/dev/breeze/doc/04_troubleshooting.rst b/dev/breeze/doc/04_troubleshooting.rst index f9828ef1b700e..9f7a409b01f95 100644 --- a/dev/breeze/doc/04_troubleshooting.rst +++ b/dev/breeze/doc/04_troubleshooting.rst @@ -83,7 +83,7 @@ describe your problem. stated in `This comment `_ and allows to run Breeze with no problems. -Cannot import name 'cache' or Python >=3.9 required +Cannot import name 'cache' or Python >=3.10 required ---------------------------------------------------- When you see this error: @@ -96,7 +96,7 @@ or .. code-block:: - ERROR: Package 'blacken-docs' requires a different Python: 3.8.18 not in '>=3.9' + ERROR: Package 'blacken-docs' requires a different Python: 3.8.18 not in '>=3.10' It means that your pre-commit hook is installed with (already End-Of-Life) Python 3.8 and you should reinstall This can be done with ``uv tool`` to install ``pre-commit``) .. code-block:: bash uv tool uninstall pre-commit - uv tool install pre-commit --python 3.9 --force --with pre-commit-uv + uv tool install pre-commit --python 3.10 --force --with pre-commit-uv pre-commit clean pre-commit install @@ -116,7 +116,7 @@ You can also use ``pipx`` .. code-block:: bash pipx uninstall pre-commit - pipx install pre-commit --python $(which python3.9) --force + pipx install pre-commit --python $(which python3.10) --force # This one allows pre-commit to use uv for venvs installed by pre-commit pipx inject pre-commit pre-commit-uv # optionally if you want to use uv to install virtualenvs pre-commit clean diff --git a/dev/breeze/doc/05_test_commands.rst b/dev/breeze/doc/05_test_commands.rst index a51013e171a25..1253790707b49 100644 --- a/dev/breeze/doc/05_test_commands.rst +++ b/dev/breeze/doc/05_test_commands.rst @@ -570,7 +570,7 @@ as executor you use, similar to: .. code-block:: bash - (kind-airflow-python-3.9-v1.24.0:KubernetesExecutor)> + (kind-airflow-python-3.10-v1.24.0:KubernetesExecutor)> The shell automatically activates the virtual environment that has all appropriate dependencies be created and Airflow deployed to it before running the tests): ..
code-block:: bash
 
-  (kind-airflow-python-3.9-v1.24.0:KubernetesExecutor)> pytest test_kubernetes_executor.py
+  (kind-airflow-python-3.10-v1.24.0:KubernetesExecutor)> pytest test_kubernetes_executor.py
   ================================================= test session starts =================================================
   platform linux -- Python 3.10.6, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 -- /home/jarek/code/airflow/kubernetes-tests/.venv/bin/python
   cachedir: .pytest_cache
@@ -598,7 +598,7 @@ be created and Airflow deployed to it before running the tests):
   -- Docs: https://docs.pytest.org/en/stable/warnings.html
   ============================================ 2 passed, 1 warning in 38.62s ============================================
 
-  (kind-airflow-python-3.9-v1.24.0:KubernetesExecutor)>
+  (kind-airflow-python-3.10-v1.24.0:KubernetesExecutor)>
 
 All parameters of the command are here:
 
diff --git a/dev/breeze/doc/06_managing_docker_images.rst b/dev/breeze/doc/06_managing_docker_images.rst
index d743a7ef7bc17..d30e73728c8b2 100644
--- a/dev/breeze/doc/06_managing_docker_images.rst
+++ b/dev/breeze/doc/06_managing_docker_images.rst
@@ -120,13 +120,13 @@ To load the image from specific PR, you can use the following command:
 
 .. code-block:: bash
 
-    breeze ci-image load --from-pr 12345 --python 3.9 --github-token
+    breeze ci-image load --from-pr 12345 --python 3.10 --github-token
 
 To load the image from a specific job run (for example 12538475388), you can use the following command;
 find the run id in the GitHub Actions runs.
 
 .. code-block:: bash
 
-    breeze ci-image load --from-run 12538475388 --python 3.9 --github-token
+    breeze ci-image load --from-run 12538475388 --python 3.10 --github-token
 
 After you load the image, you can reproduce the exact environment that was used in the CI run by
 entering the breeze container without mounting your local sources:
@@ -220,10 +220,10 @@ suffix and they need to also be paired with corresponding runtime dependency add
 
 .. code-block:: bash
 
-    breeze prod-image build --python 3.9 --additional-dev-deps "libasound2-dev" \
+    breeze prod-image build --python 3.10 --additional-dev-deps "libasound2-dev" \
         --additional-runtime-apt-deps "libasound2"
 
-Same as above but uses python 3.9.
+Same as above but uses python 3.10.
 
 Building PROD image
 ...................
diff --git a/dev/breeze/doc/10_advanced_breeze_topics.rst b/dev/breeze/doc/10_advanced_breeze_topics.rst
index 3cd22855db2fe..d3f393c2b3963 100644
--- a/dev/breeze/doc/10_advanced_breeze_topics.rst
+++ b/dev/breeze/doc/10_advanced_breeze_topics.rst
@@ -49,7 +49,7 @@ make sure to follow these steps:
   this will bypass the check we run in Breeze to see if there are new requirements to install for it
 
 See example configuration for PyCharm which has run/debug configuration for
-``breeze sbom generate-providers-requirements --provider-id sqlite --python 3.9``
+``breeze sbom generate-providers-requirements --provider-id sqlite --python 3.10``
 
..
raw:: html
 
diff --git a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md
index 87bbe46a6486e..79401b85f95b5 100644
--- a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md
+++ b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md
@@ -138,7 +138,7 @@ There are a few properties of Breeze/CI scripts that should be maintained though
   run a command and get everything done with the least number of prerequisites
 * The prerequisites for Breeze and CI are:
-  * Python 3.9+ (Python 3.9 end of life is October 2025)
+  * Python 3.10+ (Python 3.10 end of life is October 2026)
   * Docker (23.0+)
   * Docker Compose (2.16.0+)
   * No other tools and CLI commands should be needed
diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md
index 00007ded26e71..4dbfdb6e19637 100644
--- a/dev/breeze/doc/ci/02_images.md
+++ b/dev/breeze/doc/ci/02_images.md
@@ -129,17 +129,17 @@ The images are built with default extras - different extras for CI and production
 image and you can change the extras via the `--airflow-extras` parameters
 and add new ones with `--additional-airflow-extras`.
 
-For example if you want to build Python 3.9 version of production image
+For example, if you want to build the Python 3.10 version of the production image
 with "all" extras installed you should run this command:
 
 ``` bash
-breeze prod-image build --python 3.9 --airflow-extras "all"
+breeze prod-image build --python 3.10 --airflow-extras "all"
 ```
 
 If you just want to add new extras you can add them like this:
 
 ``` bash
-breeze prod-image build --python 3.9 --additional-airflow-extras "all"
+breeze prod-image build --python 3.10 --additional-airflow-extras "all"
 ```
 
 The command that builds the CI image is optimized to minimize the time
@@ -160,7 +160,7 @@ You can also build production images from PIP packages via providing
 `--install-airflow-version` parameter to Breeze:
 
 ``` bash
-breeze prod-image build --python 3.9 --additional-airflow-extras=trino --install-airflow-version=2.0.0
+breeze prod-image build --python 3.10 --additional-airflow-extras=trino --install-airflow-version=2.0.0
 ```
 
 This will build the image using a command similar to:
 
@@ -168,7 +168,7 @@ This will build the image using command similar to:
 ``` bash
 pip install \
   apache-airflow[async,amazon,celery,cncf.kubernetes,docker,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv]==2.0.0 \
-  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.9.txt"
+  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.10.txt"
 ```
 
 > [!NOTE]
@@ -199,7 +199,7 @@ HEAD of development for constraints):
 
 ``` bash
 pip install "https://github.com/apache/airflow/archive/.tar.gz#egg=apache-airflow" \
-  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt"
+  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.10.txt"
 ```
 
 You can also skip installing airflow and install it from locally
@@ -207,7 +207,7 @@ provided files by using `--install-distributions-from-context` parameter
 to Breeze:
 
 ``` bash
-breeze prod-image build --python 3.9 --additional-airflow-extras=trino --install-distributions-from-context
+breeze prod-image build --python 3.10 --additional-airflow-extras=trino --install-distributions-from-context
 ```
 
 In this case your airflow and all packages
(.whl files) should be placed
@@ -243,20 +243,20 @@ flags: `registry` (default), `local`, or `disabled` flags when you run Breeze
 commands. For example:
 
 ``` bash
-breeze ci-image build --python 3.9 --docker-cache local
+breeze ci-image build --python 3.10 --docker-cache local
 ```
 
 Will build the CI image using local build cache (note that it will
 take quite a long time the first time you run it).
 
 ``` bash
-breeze prod-image build --python 3.9 --docker-cache registry
+breeze prod-image build --python 3.10 --docker-cache registry
 ```
 
 Will build the production image with cache taken from the registry.
 
 ``` bash
-breeze prod-image build --python 3.9 --docker-cache disabled
+breeze prod-image build --python 3.10 --docker-cache disabled
 ```
 
 Will build the production image from scratch.
 
@@ -359,7 +359,7 @@ you can build the image in the
 
 Only a few examples are presented here; they should give you a general
 understanding of what you can customize.
 
-This builds the production image in version 3.9 with additional airflow
+This builds the production image in version 3.10 with additional airflow
 extras from 2.0.0 PyPI package and additional apt dev and runtime
 dependencies.
 
@@ -371,7 +371,7 @@ plugin installed.
 
 ``` bash
 DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
   --pull \
-  --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \
+  --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \
   --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \
   --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \
   --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \
@@ -382,7 +382,7 @@ the same image can be built using `breeze` (it supports auto-completion of
 the options):
 
 ``` bash
-breeze ci-image build --python 3.9 --additional-airflow-extras=jdbc --additional-python-deps="pandas" \
+breeze ci-image build --python 3.10 --additional-airflow-extras=jdbc --additional-python-deps="pandas" \
     --additional-dev-apt-deps="gcc g++"
 ```
 
@@ -396,7 +396,7 @@ comment](https://github.com/apache/airflow/issues/8605#issuecomment-690065621):
 
 ``` bash
 DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
   --pull \
-  --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \
+  --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \
   --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
   --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack" \
   --build-arg ADDITIONAL_PYTHON_DEPS="apache-airflow-providers-odbc \
@@ -419,93 +419,93 @@ DOCKER_BUILDKIT=1 docker build .
-f Dockerfile.ci \ The following build arguments (`--build-arg` in docker build command) can be used for CI images: -| Build argument | Default value | Description | -|-----------------------------------|----------------------------|-------------------------------------------------------------------------------------------------------------------| -| `PYTHON_BASE_IMAGE` | `python:3.9-slim-bookworm` | Base Python image | -| `PYTHON_MAJOR_MINOR_VERSION` | `3.9` | major/minor version of Python (should match base image) | -| `DEPENDENCIES_EPOCH_NUMBER` | `2` | increasing this number will reinstall all apt dependencies | -| `ADDITIONAL_PIP_INSTALL_FLAGS` | | additional `pip` flags passed to the installation commands (except when reinstalling `pip` itself) | -| `HOME` | `/root` | Home directory of the root user (CI image has root user as default) | -| `AIRFLOW_HOME` | `/root/airflow` | Airflow's HOME (that's where logs and sqlite databases are stored) | -| `AIRFLOW_SOURCES` | `/opt/airflow` | Mounted sources of Airflow | -| `AIRFLOW_REPO` | `apache/airflow` | the repository from which PIP dependencies are pre-installed | -| `AIRFLOW_BRANCH` | `main` | the branch from which PIP dependencies are pre-installed | -| `AIRFLOW_CI_BUILD_EPOCH` | `1` | increasing this value will reinstall PIP dependencies from the repository from scratch | -| `AIRFLOW_CONSTRAINTS_LOCATION` | | If not empty, it will override the source of the constraints with the specified URL or file. | -| `AIRFLOW_CONSTRAINTS_REFERENCE` | `constraints-main` | reference (branch or tag) from GitHub repository from which constraints are used. | -| `AIRFLOW_EXTRAS` | `all` | extras to install | -| `UPGRADE_RANDOM_INDICATOR_STRING` | | If set to any random value the dependencies are upgraded to newer versions. In CI it is set to build id. | -| `ADDITIONAL_AIRFLOW_EXTRAS` | | additional extras to install | -| `ADDITIONAL_PYTHON_DEPS` | | additional Python dependencies to install | -| `DEV_APT_COMMAND` | | Dev apt command executed before dev deps are installed in the first part of image | -| `ADDITIONAL_DEV_APT_COMMAND` | | Additional Dev apt command executed before dev dep are installed in the first part of the image | -| `DEV_APT_DEPS` | | Dev APT dependencies installed in the first part of the image (default empty means default dependencies are used) | -| `ADDITIONAL_DEV_APT_DEPS` | | Additional apt dev dependencies installed in the first part of the image | -| `ADDITIONAL_DEV_APT_ENV` | | Additional env variables defined when installing dev deps | -| `AIRFLOW_PIP_VERSION` | `25.1.1` | `pip` version used. | -| `AIRFLOW_UV_VERSION` | `0.7.16` | `uv` version used. | -| `AIRFLOW_PRE_COMMIT_VERSION` | `4.2.0` | `pre-commit` version used. | -| `AIRFLOW_PRE_COMMIT_UV_VERSION` | `4.1.4` | `pre-commit-uv` version used. | -| `AIRFLOW_USE_UV` | `true` | Whether to use UV for installation. 
|
-| `PIP_PROGRESS_BAR`                | `on`                       | Progress bar for PIP installation                                                                                   |
+| Build argument                    | Default value               | Description                                                                                                         |
+|-----------------------------------|-----------------------------|---------------------------------------------------------------------------------------------------------------------|
+| `PYTHON_BASE_IMAGE`               | `python:3.10-slim-bookworm` | Base Python image                                                                                                   |
+| `PYTHON_MAJOR_MINOR_VERSION`      | `3.10`                      | major/minor version of Python (should match base image)                                                            |
+| `DEPENDENCIES_EPOCH_NUMBER`       | `2`                         | increasing this number will reinstall all apt dependencies                                                         |
+| `ADDITIONAL_PIP_INSTALL_FLAGS`    |                             | additional `pip` flags passed to the installation commands (except when reinstalling `pip` itself)                 |
+| `HOME`                            | `/root`                     | Home directory of the root user (CI image has root user as default)                                                |
+| `AIRFLOW_HOME`                    | `/root/airflow`             | Airflow's HOME (that's where logs and sqlite databases are stored)                                                 |
+| `AIRFLOW_SOURCES`                 | `/opt/airflow`              | Mounted sources of Airflow                                                                                         |
+| `AIRFLOW_REPO`                    | `apache/airflow`            | the repository from which PIP dependencies are pre-installed                                                       |
+| `AIRFLOW_BRANCH`                  | `main`                      | the branch from which PIP dependencies are pre-installed                                                           |
+| `AIRFLOW_CI_BUILD_EPOCH`          | `1`                         | increasing this value will reinstall PIP dependencies from the repository from scratch                             |
+| `AIRFLOW_CONSTRAINTS_LOCATION`    |                             | If not empty, it will override the source of the constraints with the specified URL or file.                       |
+| `AIRFLOW_CONSTRAINTS_REFERENCE`   | `constraints-main`          | reference (branch or tag) from GitHub repository from which constraints are used.                                  |
+| `AIRFLOW_EXTRAS`                  | `all`                       | extras to install                                                                                                  |
+| `UPGRADE_RANDOM_INDICATOR_STRING` |                             | If set to any random value the dependencies are upgraded to newer versions. In CI it is set to build id.           |
+| `ADDITIONAL_AIRFLOW_EXTRAS`       |                             | additional extras to install                                                                                       |
+| `ADDITIONAL_PYTHON_DEPS`          |                             | additional Python dependencies to install                                                                          |
+| `DEV_APT_COMMAND`                 |                             | Dev apt command executed before dev deps are installed in the first part of image                                  |
+| `ADDITIONAL_DEV_APT_COMMAND`      |                             | Additional Dev apt command executed before dev deps are installed in the first part of the image                   |
+| `DEV_APT_DEPS`                    |                             | Dev APT dependencies installed in the first part of the image (default empty means default dependencies are used)  |
+| `ADDITIONAL_DEV_APT_DEPS`         |                             | Additional apt dev dependencies installed in the first part of the image                                           |
+| `ADDITIONAL_DEV_APT_ENV`          |                             | Additional env variables defined when installing dev deps                                                          |
+| `AIRFLOW_PIP_VERSION`             | `25.1.1`                    | `pip` version used.                                                                                                |
+| `AIRFLOW_UV_VERSION`              | `0.7.16`                    | `uv` version used.                                                                                                 |
+| `AIRFLOW_PRE_COMMIT_VERSION`      | `4.2.0`                     | `pre-commit` version used.                                                                                         |
+| `AIRFLOW_PRE_COMMIT_UV_VERSION`   | `4.1.4`                     | `pre-commit-uv` version used.                                                                                      |
+| `AIRFLOW_USE_UV`                  | `true`                      | Whether to use UV for installation.                                                                                |
+| `PIP_PROGRESS_BAR`                | `on`                        | Progress bar for PIP installation                                                                                  |
 
 Here are some examples of how CI images can be built manually. The CI image is
 always built from local sources.
 
-This builds the CI image in version 3.9 with default extras ("all").
+This builds the CI image in version 3.10 with default extras ("all").
 
 ``` bash
 DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
   --pull \
-  --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" --tag my-image:0.0.1
+  --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" --tag my-image:0.0.1
 ```
 
-This builds the CI image in version 3.9 with "gcp" extra only.
+This builds the CI image in version 3.10 with "gcp" extra only.
 
 ``` bash
 DOCKER_BUILDKIT=1 docker build .
-f Dockerfile.ci \
   --pull \
-  --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \
+  --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \
   --build-arg AIRFLOW_EXTRAS=gcp --tag my-image:0.0.1
 ```
 
-This builds the CI image in version 3.9 with "apache-beam" extra added.
+This builds the CI image in version 3.10 with "apache-beam" extra added.
 
 ``` bash
 DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
   --pull \
-  --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \
+  --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \
   --build-arg ADDITIONAL_AIRFLOW_EXTRAS="apache-beam" --tag my-image:0.0.1
 ```
 
-This builds the CI image in version 3.9 with "mssql" additional package
+This builds the CI image in version 3.10 with "mssql" additional package
 added.
 
 ``` bash
 DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
   --pull \
-  --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \
+  --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \
   --build-arg ADDITIONAL_PYTHON_DEPS="mssql" --tag my-image:0.0.1
 ```
 
-This builds the CI image in version 3.9 with "gcc" and "g++" additional
+This builds the CI image in version 3.10 with "gcc" and "g++" additional
 apt dev dependencies added.
 
 ``` bash
 DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
   --pull \
-  --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \
+  --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \
   --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" --tag my-image:0.0.1
 ```
 
-This builds the CI image in version 3.9 with "jdbc" extra and
+This builds the CI image in version 3.10 with "jdbc" extra and
 "default-jre-headless" additional apt runtime dependencies added.
 
 ``` bash
 DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
   --pull \
-  --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \
+  --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \
   --build-arg AIRFLOW_EXTRAS=jdbc \
   --tag my-image:0.0.1
 ```
@@ -567,7 +567,7 @@ percent-encoded when you access them via UI (/ = %2F)
 | PROD image | airflow/\/prod/python\ | faster to build or pull. Production image optimized for size. |
 
 - \ might be either "main" or "v2-\*-test"
-- \ - Python version (Major + Minor).Should be one of \["3.9", "3.10", "3.11", "3.12" \].
+- \ - Python version (Major + Minor). Should be one of \["3.10", "3.11", "3.12"\].
 
 ----
 
diff --git a/dev/breeze/doc/ci/04_selective_checks.md b/dev/breeze/doc/ci/04_selective_checks.md
index 615bee819b782..15a60d24670d0 100644
--- a/dev/breeze/doc/ci/04_selective_checks.md
+++ b/dev/breeze/doc/ci/04_selective_checks.md
@@ -164,79 +164,79 @@ separated by spaces.
 This is to accommodate the way outputs of this kind are used by
 GitHub Actions to pass the list of parameters to a command to execute
 
-| Output                                                  | Meaning of the output                                                                                    | Example value                            | List |
-|---------------------------------------------------------|----------------------------------------------------------------------------------------------------------|------------------------------------------|------|
-| all-python-versions                                     | List of all python versions there are available in the form of JSON array                               | \['3.9', '3.10'\]                        |      |
-| all-python-versions-list-as-string                      | List of all python versions there are available in the form of space separated string                   | 3.9 3.10                                 | *    |
-| all-versions                                            | If set to true, then all python, k8s, DB versions are used for tests.
| false | | -| basic-checks-only | Whether to run all static checks ("false") or only basic set of static checks ("true") | false | | -| build_system_changed_in_pyproject_toml | When builds system dependencies changed in pyproject.toml changed in the PR. | false | | -| ci-image-build | Whether CI image build is needed | true | | -| core-test-types-list-as-strings-in-json | Which test types should be run for unit tests for core | API Always Providers | * | -| debug-resources | Whether resources usage should be printed during parallel job execution ("true"/ "false") | false | | -| default-branch | Which branch is default for the build ("main" for main branch, "v2-4-test" for 2.4 line etc.) | main | | -| default-constraints-branch | Which branch is default for the build ("constraints-main" for main branch, "constraints-2-4" etc.) | constraints-main | | -| default-helm-version | Which Helm version to use as default | v3.9.4 | | -| default-kind-version | Which Kind version to use as default | v0.16.0 | | -| default-kubernetes-version | Which Kubernetes version to use as default | v1.25.2 | | -| default-mysql-version | Which MySQL version to use as default | 5.7 | | -| default-postgres-version | Which Postgres version to use as default | 10 | | -| default-python-version | Which Python version to use as default | 3.9 | | -| disable-airflow-repo-cache | Disables cache of the repo main cache in CI - aiflow will be installed without main installation cache | true | | -| docker-cache | Which cache should be used for images ("registry", "local" , "disabled") | registry | | -| docs-build | Whether to build documentation ("true"/"false") | true | | -| docs-list-as-string | What filter to apply to docs building - based on which documentation packages should be built | apache-airflow helm-chart google | * | -| excluded-providers-as-string c | List of providers that should be excluded from the build as space-separated string | amazon google | * | -| force-pip | Whether pip should be forced in the image build instead of uv ("true"/"false") | false | | -| full-tests-needed | Whether this build runs complete set of tests or only subset (for faster PR builds) \[1\] | false | | -| generated-dependencies-changed | Whether generated dependencies have changed ("true"/"false") | false | | -| has-migrations | Whether the PR has migrations ("true"/"false") | false | | -| hatch-build-changed | When hatch build.py changed in the PR. 
| false | | -| helm-test-packages-list-as-string | List of helm packages to test as JSON array | \["airflow_aux", "airflow_core"\] | * | -| helm-version | Which Helm version to use for tests | v3.15.3 | | -| include-success-outputs | Whether to include outputs of successful parallel tests ("true"/"false") | false | | -| individual-providers-test-types-list-as-strings-in-json | Which test types should be run for unit tests for providers (individually listed) | Providers[\amazon\] Providers\[google\] | * | -| is-committer-build | Whether the build is triggered by a committer | false | | -| is-legacy-ui-api-labeled | Whether the PR is labeled as legacy UI/API | false | | -| kind-version | Which Kind version to use for tests | v0.24.0 | | -| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.9-v1.25.2 3.10-v1.28.13 | * | -| kubernetes-versions | All Kubernetes versions to use for tests as JSON array | \['v1.25.2'\] | | -| kubernetes-versions-list-as-string | All Kubernetes versions to use for tests as space-separated string | v1.25.2 | * | -| latest-versions-only | If set, the number of Python, Kubernetes, DB versions will be limited to the latest ones. | false | | -| mypy-checks | List of folders to be considered for mypy checks | \["airflow_aux", "airflow_core"\] | | -| mysql-exclude | Which versions of MySQL to exclude for tests as JSON array | [] | | -| mysql-versions | Which versions of MySQL to use for tests as JSON array | \['8.0'\] | | -| needs-api-codegen | Whether "api-codegen" are needed to run ("true"/"false") | true | | -| needs-api-tests | Whether "api-tests" are needed to run ("true"/"false") | true | | -| needs-helm-tests | Whether Helm tests are needed to run ("true"/"false") | true | | -| needs-javascript-scans | Whether javascript CodeQL scans should be run ("true"/"false") | true | | -| needs-mypy | Whether mypy check is supposed to run in this build | true | | -| needs-python-scans | Whether Python CodeQL scans should be run ("true"/"false") | true | | -| only-new-ui-files | Whether only new UI files are present in the PR ("true"/"false") | false | | -| postgres-exclude | Which versions of Postgres to exclude for tests as JSON array | [] | | -| postgres-versions | Which versions of Postgres to use for tests as JSON array | \['12'\] | | -| prod-image-build | Whether PROD image build is needed | true | | -| providers-compatibility-tests-matrix | Matrix of providers compatibility tests: (python_version, airflow_version, removed_providers) | \[{}\] | | -| providers-test-types-list-as-strings-in-json | Which test types should be run for unit tests for providers | Providers Providers\[-google\] | * | -| pyproject-toml-changed | When pyproject.toml changed in the PR. 
| false | | -| python-versions | List of python versions to use for that build | \['3.9'\] | | -| python-versions-list-as-string | Which versions of MySQL to use for tests as space-separated string | 3.9 | * | -| run-amazon-tests | Whether Amazon tests should be run ("true"/"false") | true | | -| run-kubernetes-tests | Whether Kubernetes tests should be run ("true"/"false") | true | | -| run-system-tests | Whether system tests should be run ("true"/"false") | true | | -| run-task-sdk-tests | Whether Task SDK tests should be run ("true"/"false") | true | | -| run-tests | Whether unit tests should be run ("true"/"false") | true | | -| run-ui-tests | Whether UI tests should be run ("true"/"false") | true | | -| run-www-tests | Whether Legacy WWW tests should be run ("true"/"false") | true | | -| amd-runners | List of labels assigned for runners for that build for public AMD runners | \["ubuntu-22.04"\] | | -| arm-runners | List of labels assigned for runners for that build for public ARM runners | \["ubuntu-22.04-arm"\] | | -| selected-providers-list-as-string | List of providers affected when they are selectively affected. | airbyte http | * | -| skip-pre-commits | Which pre-commits should be skipped during the static-checks run | flynt,identity | | -| skip-providers-tests | When provider tests should be skipped (on non-main branch or when no provider changes detected) | true | | -| sqlite-exclude | Which versions of Sqlite to exclude for tests as JSON array | [] | | -| testable-core-integrations | List of core integrations that are testable in the build as JSON array | \['celery', 'kerberos'\] | | -| testable-providers-integrations | List of core integrations that are testable in the build as JSON array | \['mongo', 'kafka'\] | | -| upgrade-to-newer-dependencies | Whether the image build should attempt to upgrade all dependencies (true/false or commit hash) | false | | +| Output | Meaning of the output | Example value | List | +|---------------------------------------------------------|--------------------------------------------------------------------------------------------------------|------------------------------------------|------| +| all-python-versions | List of all python versions there are available in the form of JSON array | \['3.10', '3.11'\] | | +| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.10 3.11 | * | +| all-versions | If set to true, then all python, k8s, DB versions are used for tests. | false | | +| basic-checks-only | Whether to run all static checks ("false") or only basic set of static checks ("true") | false | | +| build_system_changed_in_pyproject_toml | When builds system dependencies changed in pyproject.toml changed in the PR. | false | | +| ci-image-build | Whether CI image build is needed | true | | +| core-test-types-list-as-strings-in-json | Which test types should be run for unit tests for core | API Always Providers | * | +| debug-resources | Whether resources usage should be printed during parallel job execution ("true"/ "false") | false | | +| default-branch | Which branch is default for the build ("main" for main branch, "v2-4-test" for 2.4 line etc.) | main | | +| default-constraints-branch | Which branch is default for the build ("constraints-main" for main branch, "constraints-2-4" etc.) 
| constraints-main | |
+| default-helm-version | Which Helm version to use as default | v3.9.4 | |
+| default-kind-version | Which Kind version to use as default | v0.16.0 | |
+| default-kubernetes-version | Which Kubernetes version to use as default | v1.25.2 | |
+| default-mysql-version | Which MySQL version to use as default | 5.7 | |
+| default-postgres-version | Which Postgres version to use as default | 10 | |
+| default-python-version | Which Python version to use as default | 3.10 | |
+| disable-airflow-repo-cache | Disables the repo main cache in CI - airflow will be installed without the main installation cache | true | |
+| docker-cache | Which cache should be used for images ("registry", "local", "disabled") | registry | |
+| docs-build | Whether to build documentation ("true"/"false") | true | |
+| docs-list-as-string | What filter to apply to docs building - based on which documentation packages should be built | apache-airflow helm-chart google | * |
+| excluded-providers-as-string | List of providers that should be excluded from the build as space-separated string | amazon google | * |
+| force-pip | Whether pip should be forced in the image build instead of uv ("true"/"false") | false | |
+| full-tests-needed | Whether this build runs complete set of tests or only subset (for faster PR builds) \[1\] | false | |
+| generated-dependencies-changed | Whether generated dependencies have changed ("true"/"false") | false | |
+| has-migrations | Whether the PR has migrations ("true"/"false") | false | |
+| hatch-build-changed | When hatch build.py changed in the PR. | false | |
+| helm-test-packages-list-as-string | List of helm packages to test as JSON array | \["airflow_aux", "airflow_core"\] | * |
+| helm-version | Which Helm version to use for tests | v3.15.3 | |
+| include-success-outputs | Whether to include outputs of successful parallel tests ("true"/"false") | false | |
+| individual-providers-test-types-list-as-strings-in-json | Which test types should be run for unit tests for providers (individually listed) | Providers\[amazon\] Providers\[google\] | * |
+| is-committer-build | Whether the build is triggered by a committer | false | |
+| is-legacy-ui-api-labeled | Whether the PR is labeled as legacy UI/API | false | |
+| kind-version | Which Kind version to use for tests | v0.24.0 | |
+| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.10-v1.25.2 3.11-v1.28.13 | * |
+| kubernetes-versions | All Kubernetes versions to use for tests as JSON array | \['v1.25.2'\] | |
+| kubernetes-versions-list-as-string | All Kubernetes versions to use for tests as space-separated string | v1.25.2 | * |
+| latest-versions-only | If set, the number of Python, Kubernetes, DB versions will be limited to the latest ones. | false | |
+| mypy-checks | List of folders to be considered for mypy checks | \["airflow_aux", "airflow_core"\] | |
+| mysql-exclude | Which versions of MySQL to exclude for tests as JSON array | [] | |
+| mysql-versions | Which versions of MySQL to use for tests as JSON array | \['8.0'\] | |
+| needs-api-codegen | Whether "api-codegen" are needed to run ("true"/"false") | true | |
+| needs-api-tests | Whether "api-tests" are needed to run ("true"/"false") | true | |
+| needs-helm-tests | Whether Helm tests are needed to run ("true"/"false") | true | |
+| needs-javascript-scans | Whether javascript CodeQL scans should be run ("true"/"false") | true | |
+| needs-mypy | Whether mypy check is supposed to run in this build | true | |
+| needs-python-scans | Whether Python CodeQL scans should be run ("true"/"false") | true | |
+| only-new-ui-files | Whether only new UI files are present in the PR ("true"/"false") | false | |
+| postgres-exclude | Which versions of Postgres to exclude for tests as JSON array | [] | |
+| postgres-versions | Which versions of Postgres to use for tests as JSON array | \['12'\] | |
+| prod-image-build | Whether PROD image build is needed | true | |
+| providers-compatibility-tests-matrix | Matrix of providers compatibility tests: (python_version, airflow_version, removed_providers) | \[{}\] | |
+| providers-test-types-list-as-strings-in-json | Which test types should be run for unit tests for providers | Providers Providers\[-google\] | * |
+| pyproject-toml-changed | When pyproject.toml changed in the PR. | false | |
+| python-versions | List of python versions to use for that build | \['3.10'\] | |
+| python-versions-list-as-string | List of python versions to use for that build as space-separated string | 3.10 | * |
+| run-amazon-tests | Whether Amazon tests should be run ("true"/"false") | true | |
+| run-kubernetes-tests | Whether Kubernetes tests should be run ("true"/"false") | true | |
+| run-system-tests | Whether system tests should be run ("true"/"false") | true | |
+| run-task-sdk-tests | Whether Task SDK tests should be run ("true"/"false") | true | |
+| run-tests | Whether unit tests should be run ("true"/"false") | true | |
+| run-ui-tests | Whether UI tests should be run ("true"/"false") | true | |
+| run-www-tests | Whether Legacy WWW tests should be run ("true"/"false") | true | |
+| amd-runners | List of labels assigned for runners for that build for public AMD runners | \["ubuntu-22.04"\] | |
+| arm-runners | List of labels assigned for runners for that build for public ARM runners | \["ubuntu-22.04-arm"\] | |
+| selected-providers-list-as-string | List of providers affected when they are selectively affected. | airbyte http | * |
+| skip-pre-commits | Which pre-commits should be skipped during the static-checks run | flynt,identity | |
+| skip-providers-tests | When provider tests should be skipped (on non-main branch or when no provider changes detected) | true | |
+| sqlite-exclude | Which versions of Sqlite to exclude for tests as JSON array | [] | |
+| testable-core-integrations | List of core integrations that are testable in the build as JSON array | \['celery', 'kerberos'\] | |
+| testable-providers-integrations | List of providers integrations that are testable in the build as JSON array | \['mongo', 'kafka'\] | |
+| upgrade-to-newer-dependencies | Whether the image build should attempt to upgrade all dependencies (true/false or commit hash) | false | |
 
 [1] Note for deciding if `full tests needed` mode is enabled and provider.yaml files.
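
To make the `*-list-as-string` outputs above concrete: a workflow step can feed such a space-separated value straight into a shell loop. The sketch below is illustrative only; the hard-coded value stands in for the real selective-checks output, and the wiring of that output into the variable is an assumption, not part of this patch.

```bash
#!/usr/bin/env bash
# Minimal sketch: consume the space-separated `python-versions-list-as-string`
# output by iterating over it with shell word-splitting (which is why the
# expansion below is deliberately unquoted). The hard-coded value stands in
# for the real selective-checks output.
PYTHON_VERSIONS_LIST_AS_STRING="3.10 3.11 3.12"
for python_version in ${PYTHON_VERSIONS_LIST_AS_STRING}; do
    breeze ci-image build --python "${python_version}"
done
```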
diff --git a/dev/breeze/doc/ci/07_running_ci_locally.md b/dev/breeze/doc/ci/07_running_ci_locally.md
index 6724008b8cc99..d71cddac74adc 100644
--- a/dev/breeze/doc/ci/07_running_ci_locally.md
+++ b/dev/breeze/doc/ci/07_running_ci_locally.md
@@ -76,14 +76,14 @@ this will change soon.
 To load the image from specific PR, you can use the following command:
 
 ```bash
-breeze ci-image load --from-pr 12345 --python 3.9 --github-token
+breeze ci-image load --from-pr 12345 --python 3.10 --github-token
 ```
 
 To load the image from a specific run (for example 12538475388), you can use the following command;
 find the run id in the GitHub Actions runs.
 
 ```bash
-breeze ci-image load --from-run 12538475388 --python 3.9 --github-token
+breeze ci-image load --from-run 12538475388 --python 3.10 --github-token
 ```
 
 After you load the image, you can reproduce the exact environment that was used in the CI run by
diff --git a/dev/breeze/pyproject.toml b/dev/breeze/pyproject.toml
index 722ed77772b44..4d22bbc4e4ef7 100644
--- a/dev/breeze/pyproject.toml
+++ b/dev/breeze/pyproject.toml
@@ -36,7 +36,6 @@ classifiers = [
     "Intended Audience :: Developers",
     "Framework :: Apache Airflow",
     "License :: OSI Approved :: Apache Software License",
-    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
@@ -88,7 +87,7 @@ name = "airflow_breeze"
 
 [tool.black]
 line-length = 110
-target-version = ['py39', 'py310', 'py311', 'py312']
+target-version = ['py310', 'py311', 'py312']
 
 [tool.pytest.ini_options]
 addopts = "-rasl --verbosity=2 -p no:flaky -p no:nose -p no:legacypath"
diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
index 64f652ff3cf0c..8fff8b0693a41 100644
--- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
@@ -194,16 +194,7 @@ def kill_process_group(build_process_group_id: int):
 
 
 def get_exitcode(status: int) -> int:
-    # In Python 3.9+ we will be able to use
-    # os.waitstatus_to_exitcode(status) - see https://github.com/python/cpython/issues/84275
-    # but until then we need to do this ugly conversion
-    if os.WIFSIGNALED(status):
-        return -os.WTERMSIG(status)
-    if os.WIFEXITED(status):
-        return os.WEXITSTATUS(status)
-    if os.WIFSTOPPED(status):
-        return -os.WSTOPSIG(status)
-    return 1
+    return os.waitstatus_to_exitcode(status)
 
 
 option_upgrade_to_newer_dependencies = click.option(
@@ -1005,7 +996,7 @@ def import_mount_cache(
     make_sure_builder_configured(params=BuildCiParams(builder=builder))
     dockerfile = """
# syntax=docker/dockerfile:1.4
-    FROM python:3.9-slim-bookworm
+    FROM python:3.10-slim-bookworm
     ARG TARGETARCH
     ARG DEPENDENCY_CACHE_EPOCH=
     COPY cache.tar.gz /root/.cache.tar.gz
diff --git a/dev/check_files.py b/dev/check_files.py
index 91861422d29e0..b651c06f3ad38 100644
--- a/dev/check_files.py
+++ b/dev/check_files.py
@@ -31,7 +31,7 @@ from rich import print
 
 PROVIDERS_DOCKER = """\
-FROM ghcr.io/apache/airflow/main/ci/python3.9
+FROM ghcr.io/apache/airflow/main/ci/python3.10
 RUN cd airflow-core; uv sync --no-sources
 
 # Install providers
@@ -39,7 +39,7 @@
 """
 
 AIRFLOW_DOCKER = """\
-FROM python:3.9
+FROM python:3.10
 
 # Upgrade
 RUN pip install "apache-airflow=={}"
diff --git a/dev/provider_db_inventory.py b/dev/provider_db_inventory.py
index 296938891bf21..2464b45c433b3 100755
--- a/dev/provider_db_inventory.py
+++ b/dev/provider_db_inventory.py
@@ -18,7
+18,7 @@ # PEP 723 compliant inline script metadata # /// script -# requires-python = ">=3.9" +# requires-python = ">=3.10" # dependencies = [ # "rich", # "pyyaml", diff --git a/dev/refresh_images.sh b/dev/refresh_images.sh index 46700cd75ee4e..9f39a51a87f3f 100755 --- a/dev/refresh_images.sh +++ b/dev/refresh_images.sh @@ -47,7 +47,7 @@ done # #mv -v ./dist/*.whl ./docker-context-files && chmod a+r ./docker-context-files/* # -#for PYTHON in 3.9 3.10 3.11 3.12 +#for PYTHON in 3.10 3.11 3.12 #do # breeze prod-image build \ # --builder airflow_cache \ diff --git a/docker-stack-docs/build-arg-ref.rst b/docker-stack-docs/build-arg-ref.rst index 776d50d280dd3..0e7125d6504b4 100644 --- a/docker-stack-docs/build-arg-ref.rst +++ b/docker-stack-docs/build-arg-ref.rst @@ -27,56 +27,56 @@ Basic arguments Those are the most common arguments that you use when you want to build a custom image. -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| Build argument | Default value | Description | -+==========================================+==========================================+=============================================+ -| ``PYTHON_BASE_IMAGE`` | ``python:3.9-slim-bookworm`` | Base python image. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_VERSION`` | :subst-code:`|airflow-version|` | version of Airflow. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_EXTRAS`` | (see below the table) | Default extras with which Airflow is | -| | | installed. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``ADDITIONAL_AIRFLOW_EXTRAS`` | | Optional additional extras with which | -| | | Airflow is installed. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_HOME`` | ``/opt/airflow`` | Airflow's HOME (that's where logs and | -| | | SQLite databases are stored). | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_USER_HOME_DIR`` | ``/home/airflow`` | Home directory of the Airflow user. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_PIP_VERSION`` | ```` | PIP version used. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_UV_VERSION`` | ```` | UV version used. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_USE_UV`` | ``false`` | Whether to use UV to build the image. | -| | | This is an experimental feature. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``UV_HTTP_TIMEOUT`` | ``300`` | Timeout in seconds for UV pull requests. 
| -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``ADDITIONAL_PIP_INSTALL_FLAGS`` | | additional ``pip`` flags passed to the | -| | | installation commands (except when | -| | | reinstalling ``pip`` itself) | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``PIP_PROGRESS_BAR`` | ``on`` | Progress bar for PIP installation | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_UID`` | ``50000`` | Airflow user UID. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_CONSTRAINTS`` | ``constraints`` | Type of constraints to build the image. | -| | | This can be ``constraints`` for regular | -| | | images or ``constraints-no-providers`` for | -| | | slim images. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | | Reference (branch or tag) from GitHub | -| | | where constraints file is taken from | -| | | It can be ``constraints-main`` or | -| | | ``constraints-2-0`` for | -| | | 2.0.* installation. In case of building | -| | | specific version you want to point it | -| | | to specific tag, for example | -| | | :subst-code:`constraints-|airflow-version|`.| -| | | Auto-detected if empty. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| Build argument | Default value | Description | ++==========================================+===========================================+=============================================+ +| ``PYTHON_BASE_IMAGE`` | ``python:3.10-slim-bookworm`` | Base python image. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_VERSION`` | :subst-code:`|airflow-version|` | version of Airflow. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_EXTRAS`` | (see below the table) | Default extras with which Airflow is | +| | | installed. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``ADDITIONAL_AIRFLOW_EXTRAS`` | | Optional additional extras with which | +| | | Airflow is installed. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_HOME`` | ``/opt/airflow`` | Airflow's HOME (that's where logs and | +| | | SQLite databases are stored). | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_USER_HOME_DIR`` | ``/home/airflow`` | Home directory of the Airflow user. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_PIP_VERSION`` | ```` | PIP version used. 
| ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_UV_VERSION`` | ```` | UV version used. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_USE_UV`` | ``false`` | Whether to use UV to build the image. | +| | | This is an experimental feature. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``UV_HTTP_TIMEOUT`` | ``300`` | Timeout in seconds for UV pull requests. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``ADDITIONAL_PIP_INSTALL_FLAGS`` | | additional ``pip`` flags passed to the | +| | | installation commands (except when | +| | | reinstalling ``pip`` itself) | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``PIP_PROGRESS_BAR`` | ``on`` | Progress bar for PIP installation | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_UID`` | ``50000`` | Airflow user UID. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_CONSTRAINTS`` | ``constraints`` | Type of constraints to build the image. | +| | | This can be ``constraints`` for regular | +| | | images or ``constraints-no-providers`` for | +| | | slim images. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | | Reference (branch or tag) from GitHub | +| | | where constraints file is taken from | +| | | It can be ``constraints-main`` or | +| | | ``constraints-2-0`` for | +| | | 2.0.* installation. In case of building | +| | | specific version you want to point it | +| | | to specific tag, for example | +| | | :subst-code:`constraints-|airflow-version|`.| +| | | Auto-detected if empty. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ .. 
note::
 
diff --git a/docker-stack-docs/build.rst b/docker-stack-docs/build.rst
index 2876cbe319b85..620392b360e03 100644
--- a/docker-stack-docs/build.rst
+++ b/docker-stack-docs/build.rst
@@ -215,7 +215,7 @@ In the simplest case building your image consists of those steps:
 
 1) Create your own ``Dockerfile`` (name it ``Dockerfile``) where you add:
 
-* information what your image should be based on (for example ``FROM: apache/airflow:|airflow-version|-python3.9``
+* information on what your image should be based (for example ``FROM apache/airflow:|airflow-version|-python3.10``)
 
 * additional steps that should be executed in your image (typically in the form of ``RUN ``)
 
@@ -313,17 +313,17 @@ There are two types of images you can extend your image from:
 
 Naming conventions for the images:
 
-+----------------+-----------------------+---------------------------------+--------------------------------------+
-| Image          | Python                | Standard image                  | Slim image                           |
-+================+=======================+=================================+======================================+
-| Latest default | 3.12                  | apache/airflow:latest           | apache/airflow:slim-latest           |
-+----------------+-----------------------+---------------------------------+--------------------------------------+
-| Default        | 3.12                  | apache/airflow:X.Y.Z            | apache/airflow:slim-X.Y.Z            |
-+----------------+-----------------------+---------------------------------+--------------------------------------+
-| Latest         | 3.9,3.10,3.11,3.12    | apache/airflow:latest-pythonN.M | apache/airflow:slim-latest-pythonN.M |
-+----------------+-----------------------+---------------------------------+--------------------------------------+
-| Specific       | 3.9,3.10,3.11,3.12    | apache/airflow:X.Y.Z-pythonN.M  | apache/airflow:slim-X.Y.Z-pythonN.M  |
-+----------------+-----------------------+---------------------------------+--------------------------------------+
++----------------+-------------------+---------------------------------+--------------------------------------+
+| Image          | Python            | Standard image                  | Slim image                           |
++================+===================+=================================+======================================+
+| Latest default | 3.12              | apache/airflow:latest           | apache/airflow:slim-latest           |
++----------------+-------------------+---------------------------------+--------------------------------------+
+| Default        | 3.12              | apache/airflow:X.Y.Z            | apache/airflow:slim-X.Y.Z            |
++----------------+-------------------+---------------------------------+--------------------------------------+
+| Latest         | 3.10,3.11,3.12    | apache/airflow:latest-pythonN.M | apache/airflow:slim-latest-pythonN.M |
++----------------+-------------------+---------------------------------+--------------------------------------+
+| Specific       | 3.10,3.11,3.12    | apache/airflow:X.Y.Z-pythonN.M  | apache/airflow:slim-X.Y.Z-pythonN.M  |
++----------------+-------------------+---------------------------------+--------------------------------------+
 
 * The "latest" image is always the latest released stable version available.
 
@@ -714,7 +714,7 @@ Building from PyPI packages
 
 This is the basic way of building the custom images from sources.
 
-The following example builds the production image in version ``3.9`` with latest PyPI-released Airflow,
+The following example builds the production image in version ``3.10`` with latest PyPI-released Airflow,
 with default set of Airflow extras and dependencies. The latest PyPI-released Airflow constraints are used automatically.
 
 ..
exampleinclude:: docker-examples/customizing/stable-airflow.sh @@ -722,7 +722,7 @@ with default set of Airflow extras and dependencies. The latest PyPI-released Ai :start-after: [START build] :end-before: [END build] -The following example builds the production image in version ``3.9`` with default extras from ``2.3.0`` Airflow +The following example builds the production image in version ``3.10`` with default extras from ``2.3.0`` Airflow package. The ``2.3.0`` constraints are used automatically. .. exampleinclude:: docker-examples/customizing/pypi-selected-version.sh @@ -730,7 +730,7 @@ package. The ``2.3.0`` constraints are used automatically. :start-after: [START build] :end-before: [END build] -The following example builds the production image in version ``3.9`` with additional Airflow extras +The following example builds the production image in version ``3.10`` with additional Airflow extras (``mssql,hdfs``) from ``2.3.0`` PyPI package, and additional dependency (``oauth2client``). .. exampleinclude:: docker-examples/customizing/pypi-extras-and-deps.sh @@ -757,7 +757,7 @@ have more complex dependencies to build. Building optimized images ......................... -The following example builds the production image in version ``3.9`` with additional Airflow extras from +The following example builds the production image in version ``3.10`` with additional Airflow extras from PyPI package but it includes additional apt dev and runtime dependencies. The dev dependencies are those that require ``build-essential`` and usually need to involve recompiling @@ -815,7 +815,7 @@ a branch or tag in your repository and use the tag or branch in the URL that you In case of GitHub builds you need to pass the constraints reference manually in case you want to use specific constraints, otherwise the default ``constraints-main`` is used. -The following example builds the production image in version ``3.9`` with default extras from the latest main version and +The following example builds the production image in version ``3.10`` with default extras from the latest main version and constraints are taken from latest version of the constraints-main branch in GitHub. .. exampleinclude:: docker-examples/customizing/github-main.sh diff --git a/docker-stack-docs/docker-examples/customizing/add-build-essential-custom.sh b/docker-stack-docs/docker-examples/customizing/add-build-essential-custom.sh index 9426d8925045b..c19be12d4e5a3 100755 --- a/docker-stack-docs/docker-examples/customizing/add-build-essential-custom.sh +++ b/docker-stack-docs/docker-examples/customizing/add-build-essential-custom.sh @@ -31,7 +31,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_PYTHON_DEPS="mpi4py==3.1.6" \ --build-arg ADDITIONAL_DEV_APT_DEPS="libopenmpi-dev" \ diff --git a/docker-stack-docs/docker-examples/customizing/custom-sources.sh b/docker-stack-docs/docker-examples/customizing/custom-sources.sh index 311ba5c4d33ef..33d7bb7cff313 100755 --- a/docker-stack-docs/docker-examples/customizing/custom-sources.sh +++ b/docker-stack-docs/docker-examples/customizing/custom-sources.sh @@ -32,7 +32,7 @@ export DOCKER_BUILDKIT=1 docker build . 
-f Dockerfile \ --pull \ --platform 'linux/amd64' \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack,odbc" \ --build-arg ADDITIONAL_PYTHON_DEPS=" \ diff --git a/docker-stack-docs/docker-examples/customizing/github-different-repository.sh b/docker-stack-docs/docker-examples/customizing/github-different-repository.sh index 30d4c40ef808a..82a7e9870fd38 100755 --- a/docker-stack-docs/docker-examples/customizing/github-different-repository.sh +++ b/docker-stack-docs/docker-examples/customizing/github-different-repository.sh @@ -29,7 +29,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow @ https://github.com/potiuk/airflow/archive/main.tar.gz" \ --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \ --build-arg CONSTRAINTS_GITHUB_REPOSITORY="potiuk/airflow" \ diff --git a/docker-stack-docs/docker-examples/customizing/github-main.sh b/docker-stack-docs/docker-examples/customizing/github-main.sh index 19ed9e7955da8..8b4e55523fd81 100755 --- a/docker-stack-docs/docker-examples/customizing/github-main.sh +++ b/docker-stack-docs/docker-examples/customizing/github-main.sh @@ -30,7 +30,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow @ https://github.com/apache/airflow/archive/main.tar.gz" \ --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \ --tag "my-github-main:0.0.1" diff --git a/docker-stack-docs/docker-examples/customizing/github-v2-2-test.sh b/docker-stack-docs/docker-examples/customizing/github-v2-2-test.sh index 14a0cb01c75ee..88e5bb7f55ec6 100755 --- a/docker-stack-docs/docker-examples/customizing/github-v2-2-test.sh +++ b/docker-stack-docs/docker-examples/customizing/github-v2-2-test.sh @@ -31,7 +31,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow @ https://github.com/apache/airflow/archive/v2-2-test.tar.gz" \ --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-2" \ --tag "my-github-v2-2:0.0.1" diff --git a/docker-stack-docs/docker-examples/customizing/pypi-dev-runtime-deps.sh b/docker-stack-docs/docker-examples/customizing/pypi-dev-runtime-deps.sh index 64be44ba6a217..aa9929d97216a 100755 --- a/docker-stack-docs/docker-examples/customizing/pypi-dev-runtime-deps.sh +++ b/docker-stack-docs/docker-examples/customizing/pypi-dev-runtime-deps.sh @@ -32,7 +32,7 @@ export DOCKER_BUILDKIT=1 docker build . 
\ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ diff --git a/docker-stack-docs/docker-examples/customizing/pypi-extras-and-deps.sh b/docker-stack-docs/docker-examples/customizing/pypi-extras-and-deps.sh index 4d3baa7735cc1..c86b2385289d5 100755 --- a/docker-stack-docs/docker-examples/customizing/pypi-extras-and-deps.sh +++ b/docker-stack-docs/docker-examples/customizing/pypi-extras-and-deps.sh @@ -31,7 +31,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs" \ --build-arg ADDITIONAL_PYTHON_DEPS="oauth2client" \ diff --git a/docker-stack-docs/docker-examples/customizing/pypi-selected-version.sh b/docker-stack-docs/docker-examples/customizing/pypi-selected-version.sh index 8b4b69a4db1f2..182b61f255e90 100755 --- a/docker-stack-docs/docker-examples/customizing/pypi-selected-version.sh +++ b/docker-stack-docs/docker-examples/customizing/pypi-selected-version.sh @@ -30,7 +30,7 @@ export AIRFLOW_VERSION=2.3.4 export DOCKER_BUILDKIT=1 docker build . \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --tag "my-pypi-selected-version:0.0.1" # [END build] diff --git a/docker-stack-docs/docker-examples/restricted/restricted_environments.sh b/docker-stack-docs/docker-examples/restricted/restricted_environments.sh index a41ebacf77156..d4e76e4ea4168 100755 --- a/docker-stack-docs/docker-examples/restricted/restricted_environments.sh +++ b/docker-stack-docs/docker-examples/restricted/restricted_environments.sh @@ -29,8 +29,8 @@ mkdir -p docker-context-files export AIRFLOW_VERSION="2.5.3" rm docker-context-files/*.whl docker-context-files/*.tar.gz docker-context-files/*.txt || true -curl -Lo "docker-context-files/constraints-3.9.txt" \ - "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.9.txt" +curl -Lo "docker-context-files/constraints-3.10.txt" \ + "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.10.txt" echo echo "Make sure you use the right python version here (should be same as in constraints)!" @@ -38,7 +38,7 @@ echo python --version pip download --dest docker-context-files \ - --constraint docker-context-files/constraints-3.9.txt \ + --constraint docker-context-files/constraints-3.10.txt \ "apache-airflow[async,celery,elasticsearch,kubernetes,postgres,redis,ssh,statsd,virtualenv]==${AIRFLOW_VERSION}" # [END download] @@ -47,7 +47,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg INSTALL_MYSQL_CLIENT="false" \ @@ -55,7 +55,7 @@ docker build . 
\ --build-arg INSTALL_POSTGRES_CLIENT="true" \ --build-arg DOCKER_CONTEXT_FILES="docker-context-files" \ --build-arg INSTALL_DISTRIBUTIONS_FROM_CONTEXT="true" \ - --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-3.9.txt" \ + --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-3.10.txt" \ --tag airflow-my-restricted-environment:0.0.1 # [END build] diff --git a/docker-stack-docs/entrypoint.rst b/docker-stack-docs/entrypoint.rst index 311ae098bb53e..3ddc7d479e6ba 100644 --- a/docker-stack-docs/entrypoint.rst +++ b/docker-stack-docs/entrypoint.rst @@ -132,7 +132,7 @@ if you specify extra arguments. For example: .. code-block:: bash - docker run -it apache/airflow:3.1.0-python3.9 bash -c "ls -la" + docker run -it apache/airflow:3.1.0-python3.10 bash -c "ls -la" total 16 drwxr-xr-x 4 airflow root 4096 Jun 5 18:12 . drwxr-xr-x 1 root root 4096 Jun 5 18:12 .. @@ -144,7 +144,7 @@ you pass extra parameters. For example: .. code-block:: bash - > docker run -it apache/airflow:3.1.0-python3.9 python -c "print('test')" + > docker run -it apache/airflow:3.1.0-python3.10 python -c "print('test')" test If the first argument equals ``airflow``, the rest of the arguments are treated as an Airflow command @@ -152,13 +152,13 @@ to execute. Example: .. code-block:: bash - docker run -it apache/airflow:3.1.0-python3.9 airflow webserver + docker run -it apache/airflow:3.1.0-python3.10 airflow webserver If there are any other arguments, they are simply passed to the ``airflow`` command .. code-block:: bash - > docker run -it apache/airflow:3.1.0-python3.9 help + > docker run -it apache/airflow:3.1.0-python3.10 help usage: airflow [-h] GROUP_OR_COMMAND ... positional arguments: @@ -363,7 +363,7 @@ database and creating an ``admin/admin`` Admin user with the following command: --env "_AIRFLOW_DB_MIGRATE=true" \ --env "_AIRFLOW_WWW_USER_CREATE=true" \ --env "_AIRFLOW_WWW_USER_PASSWORD=admin" \ - apache/airflow:3.1.0-python3.9 webserver + apache/airflow:3.1.0-python3.10 webserver .. code-block:: bash @@ -372,7 +372,7 @@ database and creating an ``admin/admin`` Admin user with the following command: --env "_AIRFLOW_DB_MIGRATE=true" \ --env "_AIRFLOW_WWW_USER_CREATE=true" \ --env "_AIRFLOW_WWW_USER_PASSWORD_CMD=echo admin" \ - apache/airflow:3.1.0-python3.9 webserver + apache/airflow:3.1.0-python3.10 webserver The commands above initialize the SQLite database and create an admin user with the password ``admin`` and the Admin role. They also forward local port ``8080`` to the webserver port and finally start the webserver. @@ -412,6 +412,6 @@ Example: --env "_AIRFLOW_DB_MIGRATE=true" \ --env "_AIRFLOW_WWW_USER_CREATE=true" \ --env "_AIRFLOW_WWW_USER_PASSWORD_CMD=echo admin" \ - apache/airflow:3.1.0-python3.9 webserver + apache/airflow:3.1.0-python3.10 webserver This method is only available in Airflow Docker images 2.1.1 and above.
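The build examples above always pair the Python base image with the constraints file for the same Python version. As a quick cross-check, a minimal sketch (not a script from this repository; the ``AIRFLOW_VERSION`` pin is only reused from the restricted-environment example) can derive the matching constraints URL from the running interpreter:

```python
# Minimal sketch: build the constraints URL that matches the running interpreter,
# mirroring the constraints-3.10.txt pattern used by the examples above.
import sys

AIRFLOW_VERSION = "2.5.3"  # assumption: same pin as the restricted-environment example
python_version = f"{sys.version_info.major}.{sys.version_info.minor}"
constraints_url = (
    "https://raw.githubusercontent.com/apache/airflow/"
    f"constraints-{AIRFLOW_VERSION}/constraints-{python_version}.txt"
)
print(constraints_url)  # .../constraints-2.5.3/constraints-3.10.txt on a 3.10 interpreter
```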
diff --git a/docker-stack-docs/pyproject.toml b/docker-stack-docs/pyproject.toml index 2258fa887542c..c59a56f8a08e2 100644 --- a/docker-stack-docs/pyproject.toml +++ b/docker-stack-docs/pyproject.toml @@ -24,7 +24,7 @@ build-backend = "hatchling.build" [project] name = "apache-airflow-docker-stack" description = "Programmatically author, schedule and monitor data pipelines" -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] @@ -40,7 +40,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", diff --git a/docker-tests/pyproject.toml b/docker-tests/pyproject.toml index c242dc53e2789..b0ab22e2123de 100644 --- a/docker-tests/pyproject.toml +++ b/docker-tests/pyproject.toml @@ -26,7 +26,7 @@ description = "Docker tests for Apache Airflow" classifiers = [ "Private :: Do Not Upload", ] -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] diff --git a/docker-tests/tests/docker_tests/constants.py b/docker-tests/tests/docker_tests/constants.py index 99ffb351c2dab..1ec467343d86d 100644 --- a/docker-tests/tests/docker_tests/constants.py +++ b/docker-tests/tests/docker_tests/constants.py @@ -21,6 +21,6 @@ AIRFLOW_ROOT_PATH = Path(__file__).resolve().parents[3] -DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.9" +DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.10" DEFAULT_DOCKER_IMAGE = f"ghcr.io/apache/airflow/main/prod/python{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}:latest" DOCKER_IMAGE = os.environ.get("DOCKER_IMAGE") or DEFAULT_DOCKER_IMAGE diff --git a/docker-tests/tests/docker_tests/docker_utils.py b/docker-tests/tests/docker_tests/docker_utils.py index e8a67c425b163..e2c2a42165626 100644 --- a/docker-tests/tests/docker_tests/docker_utils.py +++ b/docker-tests/tests/docker_tests/docker_utils.py @@ -87,11 +87,11 @@ def display_dependency_conflict_message(): CI image: - breeze ci-image build --upgrade-to-newer-dependencies --python 3.9 + breeze ci-image build --upgrade-to-newer-dependencies --python 3.10 Production image: - breeze ci-image build --production-image --upgrade-to-newer-dependencies --python 3.9 + breeze ci-image build --production-image --upgrade-to-newer-dependencies --python 3.10 ***** End of the instructions **** """ diff --git a/generated/PYPI_README.md b/generated/PYPI_README.md index def66279f2164..6306d1c0efe98 100644 --- a/generated/PYPI_README.md +++ b/generated/PYPI_README.md @@ -131,14 +131,14 @@ them to the appropriate format and workflow that your tool requires. ```bash pip install 'apache-airflow==3.0.2' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.2/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.2/constraints-3.10.txt" ``` 2. 
Installing with extras (e.g., postgres, google) ```bash pip install 'apache-airflow[postgres,google]==3.0.2' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.2/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-3.0.2/constraints-3.10.txt" ``` For information on installing provider distributions, check diff --git a/helm-tests/pyproject.toml b/helm-tests/pyproject.toml index b370c400b61f2..1a718a5b95c87 100644 --- a/helm-tests/pyproject.toml +++ b/helm-tests/pyproject.toml @@ -26,7 +26,7 @@ description = "Helm tests for Apache Airflow" classifiers = [ "Private :: Do Not Upload", ] -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] diff --git a/kubernetes-tests/pyproject.toml b/kubernetes-tests/pyproject.toml index 0a161cceae980..7d912622d9346 100644 --- a/kubernetes-tests/pyproject.toml +++ b/kubernetes-tests/pyproject.toml @@ -24,7 +24,7 @@ description = "Kubernetes tests for Apache Airflow" classifiers = [ "Private :: Do Not Upload", ] -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] diff --git a/kubernetes-tests/tests/kubernetes_tests/test_kubernetes_pod_operator.py b/kubernetes-tests/tests/kubernetes_tests/test_kubernetes_pod_operator.py index 12a4ee5bb24fe..afbcf113932a6 100644 --- a/kubernetes-tests/tests/kubernetes_tests/test_kubernetes_pod_operator.py +++ b/kubernetes-tests/tests/kubernetes_tests/test_kubernetes_pod_operator.py @@ -1485,7 +1485,7 @@ def __getattr__(self, name): task = KubernetesPodOperator( task_id="dry_run_demo", name="hello-dry-run", - image="python:3.9-slim-buster", + image="python:3.10-slim-buster", cmds=["printenv"], env_vars=[ V1EnvVar(name="password", value="{{ password }}"), diff --git a/providers-summary-docs/pyproject.toml b/providers-summary-docs/pyproject.toml index 4ec9fa33e71d2..b6ce35f9b4f1e 100644 --- a/providers-summary-docs/pyproject.toml +++ b/providers-summary-docs/pyproject.toml @@ -24,7 +24,7 @@ build-backend = "hatchling.build" [project] name = "apache-airflow-providers" description = "Programmatically author, schedule and monitor data pipelines" -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] @@ -40,7 +40,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", diff --git a/providers/airbyte/README.rst b/providers/airbyte/README.rst index eb26b838ae4c1..3dcd839447130 100644 --- a/providers/airbyte/README.rst +++ b/providers/airbyte/README.rst @@ -45,7 +45,7 @@ You can install this package on top of an existing Airflow 2 installation (see ` for the minimum Airflow version supported) via ``pip install apache-airflow-providers-airbyte`` -The package supports the following python versions: 3.9,3.10,3.11,3.12 +The package supports the following python versions: 3.10,3.11,3.12 Requirements ------------ diff --git a/providers/alibaba/README.rst b/providers/alibaba/README.rst index d8ca8bd2672f9..c1371f27cbe28 100644 --- a/providers/alibaba/README.rst +++ b/providers/alibaba/README.rst @@ -45,7
+45,7 @@ You can install this package on top of an existing Airflow 2 installation (see ` for the minimum Airflow version supported) via ``pip install apache-airflow-providers-alibaba`` -The package supports the following python versions: 3.9,3.10,3.11,3.12 +The package supports the following python versions: 3.10,3.11,3.12 Requirements ------------ diff --git a/providers/amazon/docs/executors/general.rst b/providers/amazon/docs/executors/general.rst index 4928e2d8f7a26..9edcc7bd1aa60 100644 --- a/providers/amazon/docs/executors/general.rst +++ b/providers/amazon/docs/executors/general.rst @@ -142,9 +142,9 @@ which is running the Airflow scheduler process (and thus, the |executorName| executor.) Apache Airflow images with specific python versions can be downloaded from the Dockerhub registry, and filtering tags by the `python -version `__. -For example, the tag ``latest-python3.9`` specifies that the image will -have python 3.9 installed. +version `__. +For example, the tag ``latest-python3.10`` specifies that the image will +have python 3.10 installed. .. END BASE_IMAGE diff --git a/providers/edge3/docs/install_on_windows.rst b/providers/edge3/docs/install_on_windows.rst index 850de56ee7cdb..7144800e9b200 100644 --- a/providers/edge3/docs/install_on_windows.rst +++ b/providers/edge3/docs/install_on_windows.rst @@ -29,7 +29,7 @@ Install Edge Worker on Windows The setup was tested on Windows 10 with Python 3.12.8, 64-bit. The backend for tests was Airflow 2.10.5. To set up an instance of Edge Worker on Windows, you need to follow the steps below: -1. Install Python 3.9 or higher. +1. Install Python 3.10 or higher. 2. Create an empty folder as a base to start with. In our example it is ``C:\Airflow``. 3. Start Shell/Command Line in ``C:\Airflow`` and create a new virtual environment via: ``python -m venv venv`` 4. Activate the virtual environment via: ``venv\Scripts\activate.bat`` diff --git a/scripts/ci/airflow_version_check.py b/scripts/ci/airflow_version_check.py index 45354d298847f..467f71442ce09 100755 --- a/scripts/ci/airflow_version_check.py +++ b/scripts/ci/airflow_version_check.py @@ -17,7 +17,7 @@ # under the License. # /// script -# requires-python = ">=3.9" +# requires-python = ">=3.10" # dependencies = [ # "packaging>=23.2", # "requests>=2.28.1", diff --git a/scripts/ci/docker-compose/devcontainer.env b/scripts/ci/docker-compose/devcontainer.env index 8f7ae95aba554..f639e6882d814 100644 --- a/scripts/ci/docker-compose/devcontainer.env +++ b/scripts/ci/docker-compose/devcontainer.env @@ -15,10 +15,10 @@ # specific language governing permissions and limitations # under the License.
HOME= -AIRFLOW_CI_IMAGE="ghcr.io/apache/airflow/main/ci/python3.9:latest" +AIRFLOW_CI_IMAGE="ghcr.io/apache/airflow/main/ci/python3.10:latest" ANSWER= AIRFLOW_ENV="development" -PYTHON_MAJOR_MINOR_VERSION="3.9" +PYTHON_MAJOR_MINOR_VERSION="3.10" AIRFLOW_EXTRAS= BASE_BRANCH="main" BREEZE="true" diff --git a/scripts/ci/docker-compose/devcontainer.yml b/scripts/ci/docker-compose/devcontainer.yml index f3f77f253f57c..2d0d918ea8210 100644 --- a/scripts/ci/docker-compose/devcontainer.yml +++ b/scripts/ci/docker-compose/devcontainer.yml @@ -19,7 +19,7 @@ services: airflow: stdin_open: true # docker run -i tty: true # docker run -t - image: ghcr.io/apache/airflow/main/ci/python3.9 + image: ghcr.io/apache/airflow/main/ci/python3.10 env_file: devcontainer.env ports: - "22:22" diff --git a/scripts/ci/pre_commit/check_min_python_version.py b/scripts/ci/pre_commit/check_min_python_version.py index 825b899241816..a7b4fbbd6f19e 100755 --- a/scripts/ci/pre_commit/check_min_python_version.py +++ b/scripts/ci/pre_commit/check_min_python_version.py @@ -26,7 +26,7 @@ from common_precommit_utils import console # update this version when we switch to a newer version of Python -required_version = tuple(map(int, "3.9".split("."))) +required_version = tuple(map(int, "3.10".split("."))) required_version_str = f"{required_version[0]}.{required_version[1]}" global_version = tuple( map( diff --git a/scripts/ci/pre_commit/update_providers_dependencies.py b/scripts/ci/pre_commit/update_providers_dependencies.py index 5397692c03669..5266bfc1d98ea 100755 --- a/scripts/ci/pre_commit/update_providers_dependencies.py +++ b/scripts/ci/pre_commit/update_providers_dependencies.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. # /// script -# requires-python = ">=3.9" +# requires-python = ">=3.10" # dependencies = [ # "rich>=12.4.4", # "pyyaml>=6.0.2", diff --git a/scripts/docker/entrypoint_ci.sh b/scripts/docker/entrypoint_ci.sh index 9dcb3ee65837f..c03a0dd38e0c8 100755 --- a/scripts/docker/entrypoint_ci.sh +++ b/scripts/docker/entrypoint_ci.sh @@ -39,7 +39,7 @@ chmod 1777 /tmp AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd) -PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.9} +PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.10} export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}} diff --git a/scripts/in_container/install_development_dependencies.py b/scripts/in_container/install_development_dependencies.py index 1b887c3d4bcd7..7658472f5829b 100755 --- a/scripts/in_container/install_development_dependencies.py +++ b/scripts/in_container/install_development_dependencies.py @@ -17,7 +17,7 @@ # specific language governing permissions and limitations # under the License. # /// script -# requires-python = ">=3.9" +# requires-python = ">=3.10" # dependencies = [ # "packaging>=23.2", # "click>=8.1.8", diff --git a/scripts/in_container/run_generate_constraints.py b/scripts/in_container/run_generate_constraints.py index 58cf5cb6afc00..03caaabbde10d 100755 --- a/scripts/in_container/run_generate_constraints.py +++ b/scripts/in_container/run_generate_constraints.py @@ -83,7 +83,7 @@ # commands that might change the installed version of apache-airflow should include "apache-airflow==X.Y.Z" # in the list of install targets to prevent Airflow accidental upgrade or downgrade. 
# -# Typical installation process of airflow for Python 3.9 is (with random selection of extras and custom +# Typical installation process of airflow for Python 3.10 is (with random selection of extras and custom # dependencies added), usually consists of two steps: # # 1. Reproducible installation of airflow with selected providers (note constraints are used): diff --git a/setup_idea.py b/setup_idea.py index 14c6fe81de343..ec6b82d7e0f1f 100755 --- a/setup_idea.py +++ b/setup_idea.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. # /// script -# requires-python = ">=3.9" +# requires-python = ">=3.10" # dependencies = [ # "rich>=13.6.0", # ] @@ -45,7 +45,7 @@ - + diff --git a/task-sdk/pyproject.toml b/task-sdk/pyproject.toml index 468a6308e6032..fb2c514349556 100644 --- a/task-sdk/pyproject.toml +++ b/task-sdk/pyproject.toml @@ -21,7 +21,7 @@ dynamic = ["version"] description = "Python Task SDK for Apache Airflow DAG Authors" readme = { file = "README.md", content-type = "text/markdown" } license-files.globs = ["LICENSE"] -requires-python = ">=3.9, <3.13" +requires-python = ">=3.10, <3.13" authors = [ {name="Apache Software Foundation", email="dev@airflow.apache.org"}, @@ -38,7 +38,6 @@ classifiers = [ "Intended Audience :: System Administrators", "Framework :: Apache Airflow", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -169,14 +168,14 @@ enum-field-as-literal='one' # When a single enum member, make it output a `Liter input-file-type='openapi' output-model-type='pydantic_v2.BaseModel' output-datetime-class='AwareDatetime' -target-python-version='3.9' +target-python-version='3.10' use-annotated=true use-default=true use-double-quotes=true use-schema-description=true # Desc becomes class doc comment use-standard-collections=true # list[] not List[] use-subclass-enum=true # enum, not union of Literals -use-union-operator=true # 3.9+annotations, not `Union[]` +use-union-operator=true # annotations, not `Union[]` custom-formatters = ['datamodel_code_formatter'] url = 'http://0.0.0.0:8080/execution/openapi.json' diff --git a/task-sdk/src/airflow/sdk/bases/decorator.py b/task-sdk/src/airflow/sdk/bases/decorator.py index c1063774220de..fb77b079a3386 100644 --- a/task-sdk/src/airflow/sdk/bases/decorator.py +++ b/task-sdk/src/airflow/sdk/bases/decorator.py @@ -21,9 +21,9 @@ import re import textwrap import warnings -from collections.abc import Collection, Iterator, Mapping, Sequence +from collections.abc import Callable, Collection, Iterator, Mapping, Sequence from functools import cached_property, update_wrapper -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, Protocol, TypeVar, cast, overload +from typing import TYPE_CHECKING, Any, ClassVar, Generic, Protocol, TypeVar, cast, overload import attr import typing_extensions @@ -424,7 +424,7 @@ def expand_kwargs(self, kwargs: OperatorExpandKwargsArgument, *, strict: bool = ) if isinstance(kwargs, Sequence): for item in kwargs: - if not isinstance(item, (XComArg, Mapping)): + if not isinstance(item, XComArg | Mapping): raise TypeError(f"expected XComArg or list[dict], not {type(kwargs).__name__}") elif not isinstance(kwargs, XComArg): raise TypeError(f"expected XComArg or list[dict], not {type(kwargs).__name__}") diff --git a/task-sdk/src/airflow/sdk/bases/sensor.py 
b/task-sdk/src/airflow/sdk/bases/sensor.py index ca2b80cb97eb6..a912476322c15 100644 --- a/task-sdk/src/airflow/sdk/bases/sensor.py +++ b/task-sdk/src/airflow/sdk/bases/sensor.py @@ -21,9 +21,9 @@ import hashlib import time import traceback -from collections.abc import Iterable +from collections.abc import Callable, Iterable from datetime import timedelta -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from airflow.configuration import conf from airflow.exceptions import ( @@ -143,7 +143,7 @@ def __init__( def _coerce_poke_interval(poke_interval: float | timedelta) -> timedelta: if isinstance(poke_interval, timedelta): return poke_interval - if isinstance(poke_interval, (int, float)) and poke_interval >= 0: + if isinstance(poke_interval, int | float) and poke_interval >= 0: return timedelta(seconds=poke_interval) raise AirflowException( "Operator arg `poke_interval` must be timedelta object or a non-negative number" @@ -153,7 +153,7 @@ def _coerce_poke_interval(poke_interval: float | timedelta) -> timedelta: def _coerce_timeout(timeout: float | timedelta) -> timedelta: if isinstance(timeout, timedelta): return timeout - if isinstance(timeout, (int, float)) and timeout >= 0: + if isinstance(timeout, int | float) and timeout >= 0: return timedelta(seconds=timeout) raise AirflowException("Operator arg `timeout` must be timedelta object or a non-negative number") @@ -161,14 +161,14 @@ def _coerce_timeout(timeout: float | timedelta) -> timedelta: def _coerce_max_wait(max_wait: float | timedelta | None) -> timedelta | None: if max_wait is None or isinstance(max_wait, timedelta): return max_wait - if isinstance(max_wait, (int, float)) and max_wait >= 0: + if isinstance(max_wait, int | float) and max_wait >= 0: return timedelta(seconds=max_wait) raise AirflowException("Operator arg `max_wait` must be timedelta object or a non-negative number") def _validate_input_values(self) -> None: - if not isinstance(self.poke_interval, (int, float)) or self.poke_interval < 0: + if not isinstance(self.poke_interval, int | float) or self.poke_interval < 0: raise AirflowException("The poke_interval must be a non-negative number") - if not isinstance(self.timeout, (int, float)) or self.timeout < 0: + if not isinstance(self.timeout, int | float) or self.timeout < 0: raise AirflowException("The timeout must be a non-negative number") if self.mode not in self.valid_modes: raise AirflowException( diff --git a/task-sdk/src/airflow/sdk/definitions/_internal/abstractoperator.py b/task-sdk/src/airflow/sdk/definitions/_internal/abstractoperator.py index ec2fefa0a08b4..4180c4edaa4db 100644 --- a/task-sdk/src/airflow/sdk/definitions/_internal/abstractoperator.py +++ b/task-sdk/src/airflow/sdk/definitions/_internal/abstractoperator.py @@ -426,7 +426,7 @@ def _walk_group(group: TaskGroup) -> Iterable[tuple[str, DAGNode]]: for key, child in _walk_group(dag.task_group): if key == self.node_id: continue - if not isinstance(child, (MappedOperator, MappedTaskGroup)): + if not isinstance(child, MappedOperator | MappedTaskGroup): continue if self.node_id in child.upstream_task_ids: yield child diff --git a/task-sdk/src/airflow/sdk/definitions/_internal/expandinput.py b/task-sdk/src/airflow/sdk/definitions/_internal/expandinput.py index b1c0c6ee5f979..00b12e24b399d 100644 --- a/task-sdk/src/airflow/sdk/definitions/_internal/expandinput.py +++ b/task-sdk/src/airflow/sdk/definitions/_internal/expandinput.py @@ -62,21 +62,21 @@ def __str__(self) -> str: def is_mappable(v: Any) -> 
TypeGuard[OperatorExpandArgument]: from airflow.sdk.definitions.xcom_arg import XComArg - return isinstance(v, (MappedArgument, XComArg, Mapping, Sequence)) and not isinstance(v, str) + return isinstance(v, MappedArgument | XComArg | Mapping | Sequence) and not isinstance(v, str) # To replace tedious isinstance() checks. def _is_parse_time_mappable(v: OperatorExpandArgument) -> TypeGuard[Mapping | Sequence]: from airflow.sdk.definitions.xcom_arg import XComArg - return not isinstance(v, (MappedArgument, XComArg)) + return not isinstance(v, MappedArgument | XComArg) # To replace tedious isinstance() checks. def _needs_run_time_resolution(v: OperatorExpandArgument) -> TypeGuard[MappedArgument | XComArg]: from airflow.sdk.definitions.xcom_arg import XComArg - return isinstance(v, (MappedArgument, XComArg)) + return isinstance(v, MappedArgument | XComArg) @attrs.define(kw_only=True) diff --git a/task-sdk/src/airflow/sdk/definitions/_internal/node.py b/task-sdk/src/airflow/sdk/definitions/_internal/node.py index 21fa4ede5b1c9..177111af541c0 100644 --- a/task-sdk/src/airflow/sdk/definitions/_internal/node.py +++ b/task-sdk/src/airflow/sdk/definitions/_internal/node.py @@ -168,7 +168,7 @@ def _set_relatives( task_object.update_relative(self, not upstream, edge_modifier=edge_modifier) relatives = task_object.leaves if upstream else task_object.roots for task in relatives: - if not isinstance(task, (BaseOperator, MappedOperator)): + if not isinstance(task, BaseOperator | MappedOperator): raise TypeError( f"Relationships can only be set between Operators; received {task.__class__.__name__}" ) diff --git a/task-sdk/src/airflow/sdk/definitions/asset/__init__.py b/task-sdk/src/airflow/sdk/definitions/asset/__init__.py index 9cb913807ee91..3fd9985521b74 100644 --- a/task-sdk/src/airflow/sdk/definitions/asset/__init__.py +++ b/task-sdk/src/airflow/sdk/definitions/asset/__init__.py @@ -23,7 +23,8 @@ import os import urllib.parse import warnings -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Literal, Union, overload +from collections.abc import Callable +from typing import TYPE_CHECKING, Any, ClassVar, Literal, overload import attrs @@ -117,7 +118,7 @@ def to_asset_alias(self) -> AssetAlias: return AssetAlias(name=self.name) -BaseAssetUniqueKey = Union[AssetUniqueKey, AssetAliasUniqueKey] +BaseAssetUniqueKey = AssetUniqueKey | AssetAliasUniqueKey def normalize_noop(parts: SplitResult) -> SplitResult: diff --git a/task-sdk/src/airflow/sdk/definitions/dag.py b/task-sdk/src/airflow/sdk/definitions/dag.py index c0a4230377449..5291a38651486 100644 --- a/task-sdk/src/airflow/sdk/definitions/dag.py +++ b/task-sdk/src/airflow/sdk/definitions/dag.py @@ -25,15 +25,13 @@ import sys import weakref from collections import abc -from collections.abc import Collection, Iterable, MutableSet +from collections.abc import Callable, Collection, Iterable, MutableSet from datetime import datetime, timedelta from inspect import signature from typing import ( TYPE_CHECKING, Any, - Callable, ClassVar, - Union, cast, overload, ) @@ -93,7 +91,7 @@ DagStateChangeCallback = Callable[[Context], None] ScheduleInterval = None | str | timedelta | relativedelta -ScheduleArg = Union[ScheduleInterval, Timetable, BaseAsset, Collection[BaseAsset]] +ScheduleArg = ScheduleInterval | Timetable | BaseAsset | Collection[BaseAsset] _DAG_HASH_ATTRS = frozenset( @@ -124,7 +122,7 @@ def _create_timetable(interval: ScheduleInterval, timezone: Timezone | FixedTime return OnceTimetable() if interval == "@continuous": return 
ContinuousTimetable() - if isinstance(interval, (timedelta, relativedelta)): + if isinstance(interval, timedelta | relativedelta): if airflow_conf.getboolean("scheduler", "create_cron_data_intervals"): return DeltaDataIntervalTimetable(interval) return DeltaTriggerTimetable(interval) @@ -809,7 +807,7 @@ def partial_subset( direct_upstreams: list[Operator] = [] if include_direct_upstream: for t in itertools.chain(matched_tasks, also_include): - upstream = (u for u in t.upstream_list if isinstance(u, (BaseOperator, MappedOperator))) + upstream = (u for u in t.upstream_list if isinstance(u, BaseOperator | MappedOperator)) direct_upstreams.extend(upstream) # Make sure to not recursively deepcopy the dag or task_group while copying the task. @@ -1284,12 +1282,7 @@ def _run_inline_trigger(trigger): import asyncio async def _run_inline_trigger_main(): - # We can replace it with `return await anext(trigger.run(), default=None)` - # when we drop support for Python 3.9 - try: - return await trigger.run().__anext__() - except StopAsyncIteration: - return None + return await anext(trigger.run(), None) return asyncio.run(_run_inline_trigger_main()) diff --git a/task-sdk/src/airflow/sdk/definitions/deadline.py b/task-sdk/src/airflow/sdk/definitions/deadline.py index 3c775b5064364..2b4a44af0adcf 100644 --- a/task-sdk/src/airflow/sdk/definitions/deadline.py +++ b/task-sdk/src/airflow/sdk/definitions/deadline.py @@ -17,8 +17,9 @@ from __future__ import annotations import logging +from collections.abc import Callable from datetime import datetime, timedelta -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING from airflow.models.deadline import ReferenceModels from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding diff --git a/task-sdk/src/airflow/sdk/definitions/decorators/__init__.py b/task-sdk/src/airflow/sdk/definitions/decorators/__init__.py index b6c3c879faf8c..41a7c2d0bf290 100644 --- a/task-sdk/src/airflow/sdk/definitions/decorators/__init__.py +++ b/task-sdk/src/airflow/sdk/definitions/decorators/__init__.py @@ -16,7 +16,7 @@ # under the License. from __future__ import annotations -from typing import Callable +from collections.abc import Callable from airflow.providers_manager import ProvidersManager from airflow.sdk.bases.decorator import TaskDecorator diff --git a/task-sdk/src/airflow/sdk/definitions/decorators/__init__.pyi b/task-sdk/src/airflow/sdk/definitions/decorators/__init__.pyi index 30e921f2f4881..e60852a3f02d9 100644 --- a/task-sdk/src/airflow/sdk/definitions/decorators/__init__.pyi +++ b/task-sdk/src/airflow/sdk/definitions/decorators/__init__.pyi @@ -20,9 +20,9 @@ # documentation for more details. from __future__ import annotations -from collections.abc import Collection, Container, Iterable, Mapping +from collections.abc import Callable, Collection, Container, Iterable, Mapping from datetime import timedelta -from typing import Any, Callable, TypeVar, overload +from typing import Any, TypeVar, overload from docker.types import Mount from kubernetes.client import models as k8s diff --git a/task-sdk/src/airflow/sdk/definitions/decorators/condition.py b/task-sdk/src/airflow/sdk/definitions/decorators/condition.py index 5ccf6b685d497..2808563ffcfd5 100644 --- a/task-sdk/src/airflow/sdk/definitions/decorators/condition.py +++ b/task-sdk/src/airflow/sdk/definitions/decorators/condition.py @@ -16,14 +16,15 @@ # under the License. 
from __future__ import annotations +from collections.abc import Callable from functools import wraps -from typing import TYPE_CHECKING, Any, Callable, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar from airflow.exceptions import AirflowSkipException from airflow.sdk.bases.decorator import Task, _TaskDecorator if TYPE_CHECKING: - from typing_extensions import TypeAlias + from typing import TypeAlias from airflow.sdk.bases.operator import TaskPreExecuteHook from airflow.sdk.definitions.context import Context diff --git a/task-sdk/src/airflow/sdk/definitions/decorators/setup_teardown.py b/task-sdk/src/airflow/sdk/definitions/decorators/setup_teardown.py index c53a84ea71bc8..e5e2bf40ee991 100644 --- a/task-sdk/src/airflow/sdk/definitions/decorators/setup_teardown.py +++ b/task-sdk/src/airflow/sdk/definitions/decorators/setup_teardown.py @@ -17,7 +17,8 @@ from __future__ import annotations import types -from typing import TYPE_CHECKING, Callable +from collections.abc import Callable +from typing import TYPE_CHECKING from airflow.exceptions import AirflowException from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS diff --git a/task-sdk/src/airflow/sdk/definitions/decorators/task_group.py b/task-sdk/src/airflow/sdk/definitions/decorators/task_group.py index bb718abdb2021..809f5889e003e 100644 --- a/task-sdk/src/airflow/sdk/definitions/decorators/task_group.py +++ b/task-sdk/src/airflow/sdk/definitions/decorators/task_group.py @@ -28,8 +28,8 @@ import functools import inspect import warnings -from collections.abc import Mapping, Sequence -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, TypeVar, overload +from collections.abc import Callable, Mapping, Sequence +from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypeVar, overload import attr @@ -144,7 +144,7 @@ def expand(self, **kwargs: OperatorExpandArgument) -> DAGNode: def expand_kwargs(self, kwargs: OperatorExpandKwargsArgument) -> DAGNode: if isinstance(kwargs, Sequence): for item in kwargs: - if not isinstance(item, (XComArg, Mapping)): + if not isinstance(item, XComArg | Mapping): raise TypeError(f"expected XComArg or list[dict], not {type(kwargs).__name__}") elif not isinstance(kwargs, XComArg): raise TypeError(f"expected XComArg or list[dict], not {type(kwargs).__name__}") diff --git a/task-sdk/src/airflow/sdk/definitions/edges.py b/task-sdk/src/airflow/sdk/definitions/edges.py index 39fafc4b932c8..4a52b9df32c4e 100644 --- a/task-sdk/src/airflow/sdk/definitions/edges.py +++ b/task-sdk/src/airflow/sdk/definitions/edges.py @@ -75,7 +75,7 @@ def _save_nodes( from airflow.sdk.definitions.xcom_arg import XComArg for node in self._make_list(nodes): - if isinstance(node, (TaskGroup, XComArg, DAGNode)): + if isinstance(node, TaskGroup | XComArg | DAGNode): stream.append(node) else: raise TypeError( diff --git a/task-sdk/src/airflow/sdk/definitions/mappedoperator.py b/task-sdk/src/airflow/sdk/definitions/mappedoperator.py index 58c03c31b87d7..9f2cc18604354 100644 --- a/task-sdk/src/airflow/sdk/definitions/mappedoperator.py +++ b/task-sdk/src/airflow/sdk/definitions/mappedoperator.py @@ -21,11 +21,12 @@ import copy import warnings from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence -from typing import TYPE_CHECKING, Any, ClassVar, Union +from typing import TYPE_CHECKING, Any, ClassVar import attrs import methodtools +from airflow.models.abstractoperator import TaskStateChangeCallback from airflow.sdk.definitions._internal.abstractoperator import ( DEFAULT_EXECUTOR, 
DEFAULT_IGNORE_FIRST_DEPENDS_ON_PAST, @@ -60,9 +61,6 @@ import jinja2 # Slow import. import pendulum - from airflow.models.abstractoperator import ( - TaskStateChangeCallback, - ) from airflow.models.expandinput import ( OperatorExpandArgument, OperatorExpandKwargsArgument, @@ -73,7 +71,6 @@ from airflow.sdk.definitions.dag import DAG from airflow.sdk.definitions.param import ParamsDict from airflow.sdk.definitions.xcom_arg import XComArg - from airflow.sdk.types import Operator from airflow.ti_deps.deps.base_ti_dep import BaseTIDep from airflow.triggers.base import StartTriggerArgs from airflow.typing_compat import TypeGuard @@ -82,9 +79,8 @@ from airflow.utils.task_group import TaskGroup from airflow.utils.trigger_rule import TriggerRule - TaskStateChangeCallbackAttrType = Union[None, TaskStateChangeCallback, list[TaskStateChangeCallback]] - -ValidationSource = Union[Literal["expand"], Literal["partial"]] +TaskStateChangeCallbackAttrType = TaskStateChangeCallback | list[TaskStateChangeCallback] | None +ValidationSource = Literal["expand"] | Literal["partial"] def validate_mapping_kwargs(op: type[BaseOperator], func: ValidationSource, value: dict[str, Any]) -> None: @@ -144,9 +140,9 @@ def is_mappable_value(value: Any) -> TypeGuard[Collection]: :meta private: """ - if not isinstance(value, (Sequence, dict)): + if not isinstance(value, Sequence | dict): return False - if isinstance(value, (bytearray, bytes, str)): + if isinstance(value, bytearray | bytes | str): return False return True @@ -196,7 +192,7 @@ def expand_kwargs(self, kwargs: OperatorExpandKwargsArgument, *, strict: bool = if isinstance(kwargs, Sequence): for item in kwargs: - if not isinstance(item, (XComArg, Mapping)): + if not isinstance(item, XComArg | Mapping): raise TypeError(f"expected XComArg or list[dict], not {type(kwargs).__name__}") elif not isinstance(kwargs, XComArg): raise TypeError(f"expected XComArg or list[dict], not {type(kwargs).__name__}") @@ -786,7 +782,7 @@ def prepare_for_execution(self) -> MappedOperator: # we don't need to create a copy of the MappedOperator here. 
return self - def iter_mapped_dependencies(self) -> Iterator[Operator]: + def iter_mapped_dependencies(self) -> Iterator[AbstractOperator]: """Upstream dependencies that provide XComs used by this task for task mapping.""" from airflow.sdk.definitions.xcom_arg import XComArg diff --git a/task-sdk/src/airflow/sdk/definitions/xcom_arg.py b/task-sdk/src/airflow/sdk/definitions/xcom_arg.py index 2a93585304cb0..7a5ed0468e739 100644 --- a/task-sdk/src/airflow/sdk/definitions/xcom_arg.py +++ b/task-sdk/src/airflow/sdk/definitions/xcom_arg.py @@ -20,9 +20,9 @@ import contextlib import inspect import itertools -from collections.abc import Iterable, Iterator, Mapping, Sequence, Sized +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence, Sized from functools import singledispatch -from typing import TYPE_CHECKING, Any, Callable, overload +from typing import TYPE_CHECKING, Any, overload from airflow.exceptions import AirflowException, XComNotFound from airflow.sdk.definitions._internal.abstractoperator import AbstractOperator @@ -104,7 +104,7 @@ def iter_xcom_references(arg: Any) -> Iterator[tuple[Operator, str]]: """ if isinstance(arg, ResolveMixin): yield from arg.iter_references() - elif isinstance(arg, (tuple, set, list)): + elif isinstance(arg, tuple | set | list): for elem in arg: yield from XComArg.iter_xcom_references(elem) elif isinstance(arg, dict): @@ -429,7 +429,7 @@ def map(self, f: Callable[[Any], Any]) -> MapXComArg: def resolve(self, context: Mapping[str, Any]) -> Any: value = self.arg.resolve(context) - if not isinstance(value, (Sequence, dict)): + if not isinstance(value, Sequence | dict): raise ValueError(f"XCom map expects sequence or dict, not {type(value).__name__}") return _MapResult(value, self.callables) @@ -494,7 +494,7 @@ def iter_references(self) -> Iterator[tuple[Operator, str]]: def resolve(self, context: Mapping[str, Any]) -> Any: values = [arg.resolve(context) for arg in self.args] for value in values: - if not isinstance(value, (Sequence, dict)): + if not isinstance(value, Sequence | dict): raise ValueError(f"XCom zip expects sequence or dict, not {type(value).__name__}") return _ZipResult(values, fillvalue=self.fillvalue) @@ -557,7 +557,7 @@ def concat(self, *others: XComArg) -> ConcatXComArg: def resolve(self, context: Mapping[str, Any]) -> Any: values = [arg.resolve(context) for arg in self.args] for value in values: - if not isinstance(value, (Sequence, dict)): + if not isinstance(value, Sequence | dict): raise ValueError(f"XCom concat expects sequence or dict, not {type(value).__name__}") return _ConcatResult(values) diff --git a/task-sdk/src/airflow/sdk/execution_time/callback_runner.py b/task-sdk/src/airflow/sdk/execution_time/callback_runner.py index f35d76d058915..316c3d38e99b8 100644 --- a/task-sdk/src/airflow/sdk/execution_time/callback_runner.py +++ b/task-sdk/src/airflow/sdk/execution_time/callback_runner.py @@ -20,7 +20,8 @@ import inspect import logging -from typing import TYPE_CHECKING, Callable, Generic, Protocol, TypeVar, cast +from collections.abc import Callable +from typing import TYPE_CHECKING, Generic, Protocol, TypeVar, cast from typing_extensions import ParamSpec diff --git a/task-sdk/src/airflow/sdk/execution_time/comms.py b/task-sdk/src/airflow/sdk/execution_time/comms.py index 97ed1761ad8eb..338f5df23a3bb 100644 --- a/task-sdk/src/airflow/sdk/execution_time/comms.py +++ b/task-sdk/src/airflow/sdk/execution_time/comms.py @@ -54,7 +54,7 @@ from functools import cached_property from pathlib import Path from socket import 
socket -from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Generic, Literal, TypeVar, Union, overload +from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Generic, Literal, TypeVar, overload from uuid import UUID import attrs @@ -558,27 +558,25 @@ class SentFDs(BaseModel): ToTask = Annotated[ - Union[ - AssetResult, - AssetEventsResult, - ConnectionResult, - DagRunStateResult, - DRCount, - ErrorResponse, - PrevSuccessfulDagRunResult, - SentFDs, - StartupDetails, - TaskRescheduleStartDate, - TICount, - TaskStatesResult, - VariableResult, - XComCountResponse, - XComResult, - XComSequenceIndexResult, - XComSequenceSliceResult, - InactiveAssetsResult, - OKResponse, - ], + AssetResult + | AssetEventsResult + | ConnectionResult + | DagRunStateResult + | DRCount + | ErrorResponse + | PrevSuccessfulDagRunResult + | SentFDs + | StartupDetails + | TaskRescheduleStartDate + | TICount + | TaskStatesResult + | VariableResult + | XComCountResponse + | XComResult + | XComSequenceIndexResult + | XComSequenceSliceResult + | InactiveAssetsResult + | OKResponse, Field(discriminator="type"), ] @@ -841,37 +839,35 @@ class GetDRCount(BaseModel): ToSupervisor = Annotated[ - Union[ - DeferTask, - DeleteXCom, - GetAssetByName, - GetAssetByUri, - GetAssetEventByAsset, - GetAssetEventByAssetAlias, - GetConnection, - GetDagRunState, - GetDRCount, - GetPrevSuccessfulDagRun, - GetTaskRescheduleStartDate, - GetTICount, - GetTaskStates, - GetVariable, - GetXCom, - GetXComCount, - GetXComSequenceItem, - GetXComSequenceSlice, - PutVariable, - RescheduleTask, - RetryTask, - SetRenderedFields, - SetXCom, - SkipDownstreamTasks, - SucceedTask, - ValidateInletsAndOutlets, - TaskState, - TriggerDagRun, - DeleteVariable, - ResendLoggingFD, - ], + DeferTask + | DeleteXCom + | GetAssetByName + | GetAssetByUri + | GetAssetEventByAsset + | GetAssetEventByAssetAlias + | GetConnection + | GetDagRunState + | GetDRCount + | GetPrevSuccessfulDagRun + | GetTaskRescheduleStartDate + | GetTICount + | GetTaskStates + | GetVariable + | GetXCom + | GetXComCount + | GetXComSequenceItem + | GetXComSequenceSlice + | PutVariable + | RescheduleTask + | RetryTask + | SetRenderedFields + | SetXCom + | SkipDownstreamTasks + | SucceedTask + | ValidateInletsAndOutlets + | TaskState + | TriggerDagRun + | DeleteVariable + | ResendLoggingFD, Field(discriminator="type"), ] diff --git a/task-sdk/src/airflow/sdk/execution_time/context.py b/task-sdk/src/airflow/sdk/execution_time/context.py index c76994995ebab..e95b9b173dc90 100644 --- a/task-sdk/src/airflow/sdk/execution_time/context.py +++ b/task-sdk/src/airflow/sdk/execution_time/context.py @@ -529,7 +529,7 @@ def __getitem__(self, key: int | Asset | AssetAlias | AssetRef) -> list[AssetEve msg: ToSupervisor if isinstance(key, int): # Support index access; it's easier for trivial cases. 
obj = self._inlets[key] - if not isinstance(obj, (Asset, AssetAlias, AssetRef)): + if not isinstance(obj, Asset | AssetAlias | AssetRef): raise IndexError(key) else: obj = key diff --git a/task-sdk/src/airflow/sdk/execution_time/secrets_masker.py b/task-sdk/src/airflow/sdk/execution_time/secrets_masker.py index cfc6214325dc4..560a493d3bc3f 100644 --- a/task-sdk/src/airflow/sdk/execution_time/secrets_masker.py +++ b/task-sdk/src/airflow/sdk/execution_time/secrets_masker.py @@ -23,28 +23,20 @@ import logging import re import sys -from collections.abc import Generator, Iterable, Iterator +from collections.abc import Callable, Generator, Iterable, Iterator from enum import Enum from functools import cache, cached_property from re import Pattern -from typing import ( - TYPE_CHECKING, - Any, - Callable, - TextIO, - TypeVar, - Union, -) +from typing import TYPE_CHECKING, Any, TextIO, TypeAlias, TypeVar from airflow import settings if TYPE_CHECKING: - from kubernetes.client import V1EnvVar - from airflow.typing_compat import TypeGuard -Redactable = TypeVar("Redactable", str, "V1EnvVar", dict[Any, Any], tuple[Any, ...], list[Any]) -Redacted = Union[Redactable, str] +V1EnvVar = TypeVar("V1EnvVar") +Redactable: TypeAlias = str | V1EnvVar | dict[Any, Any] | tuple[Any, ...] | list[Any] +Redacted: TypeAlias = Redactable | str log = logging.getLogger(__name__) @@ -240,7 +232,7 @@ def _redact_all(self, item: Redactable, depth: int, max_depth: int = MAX_RECURSI return { dict_key: self._redact_all(subval, depth + 1, max_depth) for dict_key, subval in item.items() } - if isinstance(item, (tuple, set)): + if isinstance(item, tuple | set): # Turn set into tuple! return tuple(self._redact_all(subval, depth + 1, max_depth) for subval in item) if isinstance(item, list): @@ -265,7 +257,7 @@ def _redact(self, item: Redactable, name: str | None, depth: int, max_depth: int if isinstance(item, Enum): return self._redact(item=item.value, name=name, depth=depth, max_depth=max_depth) if _is_v1_env_var(item): - tmp: dict = item.to_dict() + tmp: dict = item.to_dict() # type: ignore[attr-defined] # V1EnvVar has a to_dict method if should_hide_value_for_key(tmp.get("name", "")) and "value" in tmp: tmp["value"] = "***" else: @@ -278,7 +270,7 @@ def _redact(self, item: Redactable, name: str | None, depth: int, max_depth: int # the structure. return self.replacer.sub("***", str(item)) return item - if isinstance(item, (tuple, set)): + if isinstance(item, tuple | set): # Turn set into tuple!
return tuple( self._redact(subval, name=None, depth=(depth + 1), max_depth=max_depth) for subval in item @@ -462,7 +454,7 @@ def writable(self) -> bool: return self.target.writable() def write(self, s: str) -> int: - s = redact(s) + s = str(redact(s)) return self.target.write(s) def writelines(self, lines) -> None: diff --git a/task-sdk/src/airflow/sdk/execution_time/supervisor.py b/task-sdk/src/airflow/sdk/execution_time/supervisor.py index 70b384a8c96b2..2bae008a0a6ff 100644 --- a/task-sdk/src/airflow/sdk/execution_time/supervisor.py +++ b/task-sdk/src/airflow/sdk/execution_time/supervisor.py @@ -29,7 +29,7 @@ import time import weakref from collections import deque -from collections.abc import Generator +from collections.abc import Callable, Generator from contextlib import contextmanager, suppress from datetime import datetime, timezone from http import HTTPStatus @@ -37,7 +37,6 @@ from typing import ( TYPE_CHECKING, BinaryIO, - Callable, ClassVar, NoReturn, TextIO, diff --git a/task-sdk/src/airflow/sdk/execution_time/task_runner.py b/task-sdk/src/airflow/sdk/execution_time/task_runner.py index 6c6e597f65e5c..24127d65a2763 100644 --- a/task-sdk/src/airflow/sdk/execution_time/task_runner.py +++ b/task-sdk/src/airflow/sdk/execution_time/task_runner.py @@ -340,8 +340,8 @@ def xcom_pull( if run_id is None: run_id = self.run_id - single_task_requested = isinstance(task_ids, (str, type(None))) - single_map_index_requested = isinstance(map_indexes, (int, type(None))) + single_task_requested = isinstance(task_ids, str | type(None)) + single_map_index_requested = isinstance(map_indexes, int | type(None)) if task_ids is None: # default to the current task if not provided @@ -618,7 +618,7 @@ def parse(what: StartupDetails, log: Logger) -> RuntimeTaskInstance: ) exit(1) - if not isinstance(task, (BaseOperator, MappedOperator)): + if not isinstance(task, BaseOperator | MappedOperator): raise TypeError( f"task is of the wrong type, got {type(task)}, wanted {BaseOperator} or {MappedOperator}" ) diff --git a/task-sdk/src/airflow/sdk/log.py b/task-sdk/src/airflow/sdk/log.py index 554a96cd9e690..46efd6bf448ed 100644 --- a/task-sdk/src/airflow/sdk/log.py +++ b/task-sdk/src/airflow/sdk/log.py @@ -24,9 +24,10 @@ import re import sys import warnings +from collections.abc import Callable from functools import cache from pathlib import Path -from typing import TYPE_CHECKING, Any, BinaryIO, Callable, Generic, TextIO, TypeVar, cast +from typing import TYPE_CHECKING, Any, BinaryIO, Generic, TextIO, TypeVar, cast import msgspec import structlog diff --git a/task-sdk/src/airflow/sdk/types.py b/task-sdk/src/airflow/sdk/types.py index 8bd0ea0db8d4d..b4641000bc64a 100644 --- a/task-sdk/src/airflow/sdk/types.py +++ b/task-sdk/src/airflow/sdk/types.py @@ -19,7 +19,7 @@ import uuid from collections.abc import Iterable -from typing import TYPE_CHECKING, Any, Protocol, Union +from typing import TYPE_CHECKING, Any, Protocol from airflow.sdk.definitions._internal.types import NOTSET, ArgNotSet @@ -33,7 +33,7 @@ from airflow.sdk.definitions.context import Context from airflow.sdk.definitions.mappedoperator import MappedOperator - Operator = Union[BaseOperator, MappedOperator] + Operator = BaseOperator | MappedOperator class DagRunProtocol(Protocol): diff --git a/task-sdk/tests/conftest.py b/task-sdk/tests/conftest.py index 80c71f41a8fe8..f2660faa2d206 100644 --- a/task-sdk/tests/conftest.py +++ b/task-sdk/tests/conftest.py @@ -119,7 +119,7 @@ def captured_logs(request): # We need to remove the last processor (the one
that turns JSON into text, as we want the # event dict for tests) proc = processors.pop() - assert isinstance(proc, (structlog.dev.ConsoleRenderer, structlog.processors.JSONRenderer)), ( + assert isinstance(proc, structlog.dev.ConsoleRenderer | structlog.processors.JSONRenderer), ( "Pre-condition" ) try: diff --git a/task-sdk/tests/task_sdk/definitions/conftest.py b/task-sdk/tests/task_sdk/definitions/conftest.py index 3f89f34b4d2da..7ad358487ba63 100644 --- a/task-sdk/tests/task_sdk/definitions/conftest.py +++ b/task-sdk/tests/task_sdk/definitions/conftest.py @@ -42,7 +42,7 @@ def run(dag: DAG, task_id: str, map_index: int): for call in mock_supervisor_comms.send.mock_calls: msg = call.kwargs.get("msg") or call.args[0] - if isinstance(msg, (TaskState, SucceedTask)): + if isinstance(msg, TaskState | SucceedTask): return msg.state raise RuntimeError("Unable to find call to TaskState") diff --git a/task-sdk/tests/task_sdk/definitions/test_asset.py b/task-sdk/tests/task_sdk/definitions/test_asset.py index 2a25c0907c7dc..fd70882e96ada 100644 --- a/task-sdk/tests/task_sdk/definitions/test_asset.py +++ b/task-sdk/tests/task_sdk/definitions/test_asset.py @@ -19,7 +19,7 @@ import json import os -from typing import Callable +from collections.abc import Callable from unittest import mock import pytest @@ -244,7 +244,7 @@ def assets_equal(a1: BaseAsset, a2: BaseAsset) -> bool: if isinstance(a1, Asset) and isinstance(a2, Asset): return a1.uri == a2.uri - if isinstance(a1, (AssetAny, AssetAll)) and isinstance(a2, (AssetAny, AssetAll)): + if isinstance(a1, AssetAny | AssetAll) and isinstance(a2, AssetAny | AssetAll): if len(a1.objects) != len(a2.objects): return False diff --git a/task-sdk/tests/task_sdk/definitions/test_mappedoperator.py b/task-sdk/tests/task_sdk/definitions/test_mappedoperator.py index 5c81b64b605b3..6889d7d57c3b0 100644 --- a/task-sdk/tests/task_sdk/definitions/test_mappedoperator.py +++ b/task-sdk/tests/task_sdk/definitions/test_mappedoperator.py @@ -17,8 +17,9 @@ # under the License. from __future__ import annotations +from collections.abc import Callable from datetime import datetime, timedelta -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING from unittest import mock import pendulum diff --git a/task-sdk/tests/task_sdk/definitions/test_xcom_arg.py b/task-sdk/tests/task_sdk/definitions/test_xcom_arg.py index 73468dcb9e12b..521fa99df811f 100644 --- a/task-sdk/tests/task_sdk/definitions/test_xcom_arg.py +++ b/task-sdk/tests/task_sdk/definitions/test_xcom_arg.py @@ -17,7 +17,7 @@ # under the License. from __future__ import annotations -from typing import Callable +from collections.abc import Callable from unittest import mock import pytest
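The task-sdk changes above are mechanical applications of idioms that require Python 3.10 or newer: PEP 604 ``X | Y`` unions accepted directly by ``isinstance()``, ``Callable`` imported from ``collections.abc`` rather than ``typing``, and the ``anext()`` builtin replacing the manual ``__anext__()``/``StopAsyncIteration`` handling removed from ``dag.py``. A self-contained sketch of the same idioms follows; all names in it are illustrative, not Airflow APIs:

```python
# Self-contained sketch of the Python 3.10 idioms this diff standardizes on;
# every name below is illustrative, not an Airflow API.
import asyncio
from collections.abc import Callable  # imported from collections.abc, not typing

Handler = Callable[[str], None]  # the typing.Callable form is no longer needed


def coerce_seconds(value: int | float) -> float:
    # On 3.10+, isinstance() accepts X | Y unions directly,
    # replacing the old isinstance(value, (int, float)) tuples.
    if not isinstance(value, int | float):
        raise TypeError(f"expected a number, not {type(value).__name__}")
    return float(value)


async def first_event(events) -> str | None:
    # The anext() builtin (new in 3.10) replaces the manual
    # "try: await gen.__anext__() / except StopAsyncIteration" dance.
    return await anext(events, None)


async def _demo() -> None:
    async def events():
        yield "triggered"

    handler: Handler = print
    handler(f"{coerce_seconds(3)} {await first_event(events())}")


asyncio.run(_demo())
```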