diff --git a/Dockerfile.ci b/Dockerfile.ci index 743ff778a5f37..7f4955df13e54 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -840,7 +840,7 @@ chmod 1777 /tmp AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd) -PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.9} +PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.10} export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}} diff --git a/chart/pyproject.toml b/chart/pyproject.toml index 2c559d1997dc8..5daf4f3a7699b 100644 --- a/chart/pyproject.toml +++ b/chart/pyproject.toml @@ -40,7 +40,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", diff --git a/clients/python/pyproject.toml b/clients/python/pyproject.toml index 2cfdeeaad749a..b7f01bb004de6 100644 --- a/clients/python/pyproject.toml +++ b/clients/python/pyproject.toml @@ -42,7 +42,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -73,7 +72,7 @@ run-coverage = "pytest test" run = "run-coverage --no-cov" [[tool.hatch.envs.test.matrix]] -python = ["3.9", "3.10", "3.11"] +python = ["3.10", "3.11"] [tool.hatch.version] path = "./version.txt" diff --git a/clients/python/test_python_client.py b/clients/python/test_python_client.py index bf04d68500ea6..4000686f88229 100644 --- a/clients/python/test_python_client.py +++ b/clients/python/test_python_client.py @@ -17,7 +17,7 @@ # # PEP 723 compliant inline script metadata (not yet widely supported) # /// script -# requires-python = ">=3.9" +# requires-python = 
">=3.10" # dependencies = [ # "apache-airflow-client", # "rich", diff --git a/constraints/README.md b/constraints/README.md index 97c03dab0c400..758048a900690 100644 --- a/constraints/README.md +++ b/constraints/README.md @@ -29,12 +29,12 @@ This allows you to iterate on dependencies without having to run `--upgrade-to-n Typical workflow in this case is: * download and copy the constraint file to the folder (for example via -[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt)) +[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.10.txt)) * modify the constraint file in "constraints" folder * build the image using this command ```bash -breeze ci-image build --python 3.9 --airflow-constraints-location constraints/constraints-3.9.txt +breeze ci-image build --python 3.10 --airflow-constraints-location constraints/constraints-3.10.txt ``` You can continue iterating and updating the constraint file (and rebuilding the image) @@ -46,7 +46,7 @@ pip freeze | sort | \ grep -v "apache_airflow" | \ grep -v "apache-airflow==" | \ grep -v "@" | \ - grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.9.txt + grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.10.txt ``` If you are working with others on updating the dependencies, you can also commit the constraint diff --git a/contributing-docs/03_contributors_quick_start.rst b/contributing-docs/03_contributors_quick_start.rst index c10d4d80ab63c..ccc9a3ecac8b1 100644 --- a/contributing-docs/03_contributors_quick_start.rst +++ b/contributing-docs/03_contributors_quick_start.rst @@ -255,8 +255,8 @@ To avoid burden on our CI infrastructure and to save time, Pre-commit hooks can We have recently started to recommend ``uv`` for our local development. .. 
note:: - Remember to have global python set to Python >= 3.9 - Python 3.8 is end-of-life already and we've - started to use Python 3.9+ features in Airflow and accompanying scripts. + Remember to have global python set to Python >= 3.10 - Python 3.9 is end-of-life already and we've + started to use Python 3.10+ features in Airflow and accompanying scripts. Installing pre-commit is best done with ``uv`` (recommended) or ``pipx``. @@ -433,7 +433,7 @@ see in CI in your local environment. .. code-block:: bash - breeze --python 3.9 --backend postgres + breeze --python 3.10 --backend postgres .. note:: If you encounter an error like "docker.credentials.errors.InitializationError: @@ -490,7 +490,7 @@ Using Breeze ------------ 1. Starting the Breeze environment using ``breeze start-airflow`` starts the Breeze environment with last configuration run( - In this case Python version and backend are picked up from last execution ``breeze --python 3.9 --backend postgres``) + In this case Python version and backend are picked up from last execution ``breeze --python 3.10 --backend postgres``) It also automatically starts the API server (FastAPI api and UI), triggerer, dag processor and scheduler. It drops you in tmux with triggerer to the right, and Scheduler, API server (FastAPI api and UI), DAG processor from left to right at the bottom. Use ``[Ctrl + B] and Arrow keys`` to navigate. @@ -501,9 +501,9 @@ Using Breeze Use CI image. Branch name: main - Docker image: ghcr.io/apache/airflow/main/ci/python3.9:latest + Docker image: ghcr.io/apache/airflow/main/ci/python3.10:latest Airflow source version: 2.4.0.dev0 - Python version: 3.9 + Python version: 3.10 Backend: mysql 5.7 * Port forwarding: @@ -540,7 +540,7 @@ Using Breeze .. code-block:: bash - breeze --python 3.9 --backend postgres + breeze --python 3.10 --backend postgres 2.
Open tmux @@ -614,7 +614,7 @@ If ``breeze`` was started with ``breeze start-airflow``, this command will stop root@f3619b74c59a:/opt/airflow# stop_airflow breeze down -If ``breeze`` was started with ``breeze --python 3.9 --backend postgres`` (or similar): +If ``breeze`` was started with ``breeze --python 3.10 --backend postgres`` (or similar): .. code-block:: bash @@ -674,7 +674,7 @@ All Tests are inside ./tests directory. root@63528318c8b1:/opt/airflow# pytest tests/utils/test_dates.py ============================================================= test session starts ============================================================== - platform linux -- Python 3.9.20, pytest-8.3.3, pluggy-1.5.0 -- /usr/local/bin/python + platform linux -- Python 3.10.20, pytest-8.3.3, pluggy-1.5.0 -- /usr/local/bin/python cachedir: .pytest_cache rootdir: /opt/airflow configfile: pyproject.toml @@ -694,20 +694,20 @@ All Tests are inside ./tests directory. .. code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type All + breeze --backend postgres --postgres-version 15 --python 3.10 --db-reset testing tests --test-type All - Running specific type of test .. code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type Core + breeze --backend postgres --postgres-version 15 --python 3.10 --db-reset testing tests --test-type Core - Running Integration test for specific test type .. 
code-block:: bash - breeze --backend postgres --postgres-version 15 --python 3.9 --db-reset testing tests --test-type All --integration mongo + breeze --backend postgres --postgres-version 15 --python 3.10 --db-reset testing tests --test-type All --integration mongo - For more information on Testing visit |09_testing.rst| diff --git a/contributing-docs/05_pull_requests.rst b/contributing-docs/05_pull_requests.rst index d6e360721d275..5a7a4d96e13b4 100644 --- a/contributing-docs/05_pull_requests.rst +++ b/contributing-docs/05_pull_requests.rst @@ -92,7 +92,7 @@ these guidelines: you can push your code to PR and see results of the tests in the CI. - You can use any supported python version to run the tests, but the best is to check - if it works for the oldest supported version (Python 3.9 currently). In rare cases + if it works for the oldest supported version (Python 3.10 currently). In rare cases tests might fail with the oldest version when you use features that are available in newer Python versions. For that purpose we have ``airflow.compat`` package where we keep back-ported useful features from newer versions. diff --git a/contributing-docs/07_local_virtualenv.rst b/contributing-docs/07_local_virtualenv.rst index 501e23ac9556d..adb97fe99fa9c 100644 --- a/contributing-docs/07_local_virtualenv.rst +++ b/contributing-docs/07_local_virtualenv.rst @@ -34,7 +34,7 @@ Required Software Packages Use system-level package managers like yum, apt-get for Linux, or Homebrew for macOS to install required software packages: -* Python (One of: 3.9, 3.10, 3.11, 3.12) +* Python (One of: 3.10, 3.11, 3.12) * MySQL 5.7+ * libxml * helm (only for helm chart tests) @@ -100,11 +100,11 @@ Installing Python versions This step can be skipped - ``uv`` will automatically install the Python version you need when you create a virtualenv. -You can install Python versions using ``uv python install`` command. 
For example, to install Python 3.9.7, you can run: +You can install Python versions using ``uv python install`` command. For example, to install Python 3.10.7, you can run: .. code:: bash - uv python install 3.9.7 + uv python install 3.10.7 This is optional step - ``uv`` will automatically install the Python version you need when you create a virtualenv. @@ -124,7 +124,7 @@ with a specific Python version by running: .. code:: bash - uv venv --python 3.9.7 + uv venv --python 3.10.7 You can also create a venv with a different venv directory name by running: @@ -275,12 +275,12 @@ to avoid "works-for-me" syndrome, where you use different version of dependencie that are used in main, CI tests and by other contributors. There are different constraint files for different python versions. For example this command will install -all basic devel requirements and requirements of google provider as last successfully tested for Python 3.9: +all basic devel requirements and requirements of google provider as last successfully tested for Python 3.10: .. code:: bash uv pip install -e ".[devel,google]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.10.txt" In the future we will utilise ``uv.lock`` to manage dependencies and constraints, but for the moment we do not @@ -305,7 +305,7 @@ and install to latest supported ones by pure Airflow core. .. 
code:: bash uv pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.10.txt" These are examples of the development options available with the local virtualenv in your IDE: diff --git a/contributing-docs/08_static_code_checks.rst b/contributing-docs/08_static_code_checks.rst index 9d6c1b764a030..431d55c95f1e6 100644 --- a/contributing-docs/08_static_code_checks.rst +++ b/contributing-docs/08_static_code_checks.rst @@ -40,7 +40,7 @@ use. So, you can be sure your modifications will also work for CI if they pass pre-commit hooks. We have integrated the fantastic `pre-commit `__ framework -in our development workflow. To install and use it, you need at least Python 3.9 locally. +in our development workflow. To install and use it, you need at least Python 3.10 locally. Installing pre-commit hooks --------------------------- diff --git a/contributing-docs/testing/docker_compose_tests.rst b/contributing-docs/testing/docker_compose_tests.rst index 603c4cccffc3d..0a8bc1e5a1f80 100644 --- a/contributing-docs/testing/docker_compose_tests.rst +++ b/contributing-docs/testing/docker_compose_tests.rst @@ -48,7 +48,7 @@ Running complete test with breeze: .. code-block:: bash - breeze prod-image build --python 3.9 + breeze prod-image build --python 3.10 breeze testing docker-compose-tests In case the test fails, it will dump the logs from the running containers to the console and it @@ -65,8 +65,8 @@ to see the output of the test as it happens (it can be also set via The test can be also run manually with ``pytest docker_tests/test_docker_compose_quick_start.py`` command, provided that you have a local Airflow venv with ``dev`` extra set and the ``DOCKER_IMAGE`` environment variable is set to the image you want to test. 
The variable defaults -to ``ghcr.io/apache/airflow/main/prod/python3.9:latest`` which is built by default -when you run ``breeze prod-image build --python 3.9``. also the switches ``--skip-docker-compose-deletion`` +to ``ghcr.io/apache/airflow/main/prod/python3.10:latest`` which is built by default +when you run ``breeze prod-image build --python 3.10``. also the switches ``--skip-docker-compose-deletion`` and ``--wait-for-containers-timeout`` can only be passed via environment variables. If you want to debug the deployment using ``docker compose`` commands after ``SKIP_DOCKER_COMPOSE_DELETION`` @@ -87,7 +87,7 @@ the prod image build command above. .. code-block:: bash - export AIRFLOW_IMAGE_NAME=ghcr.io/apache/airflow/main/prod/python3.9:latest + export AIRFLOW_IMAGE_NAME=ghcr.io/apache/airflow/main/prod/python3.10:latest and follow the instructions in the `Running Airflow in Docker `_ diff --git a/contributing-docs/testing/k8s_tests.rst b/contributing-docs/testing/k8s_tests.rst index 49fb8e83bd44d..3910532cf494c 100644 --- a/contributing-docs/testing/k8s_tests.rst +++ b/contributing-docs/testing/k8s_tests.rst @@ -47,7 +47,7 @@ per each combination of Python and Kubernetes version. This is used during CI wh tests against those different clusters - even in parallel. The cluster name follows the pattern ``airflow-python-X.Y-vA.B.C`` where X.Y is a major/minor Python version -and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.9-v1.24.0`` +and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.10-v1.24.0`` Most of the commands can be executed in parallel for multiple images/clusters by adding ``--run-in-parallel`` to create clusters or deploy airflow. Similarly checking for status, dumping logs and deleting clusters @@ -234,7 +234,7 @@ Should result in KinD creating the K8S cluster. .. 
code-block:: text - Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kindconfig.yaml: + Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.10-v1.24.2/.kindconfig.yaml: kind: Cluster apiVersion: kind.x-k8s.io/v1alpha4 @@ -251,7 +251,7 @@ Should result in KinD creating the K8S cluster. listenAddress: "127.0.0.1" protocol: TCP - Creating cluster "airflow-python-3.9-v1.24.2" ... + Creating cluster "airflow-python-3.10-v1.24.2" ... ✓ Ensuring node image (kindest/node:v1.24.2) 🖼 ✓ Preparing nodes 📦 📦 ✓ Writing configuration 📜 @@ -259,10 +259,10 @@ Should result in KinD creating the K8S cluster. ✓ Installing CNI 🔌 ✓ Installing StorageClass 💾 ✓ Joining worker nodes 🚜 - Set kubectl context to "kind-airflow-python-3.9-v1.24.2" + Set kubectl context to "kind-airflow-python-3.10-v1.24.2" You can now use your cluster with: - kubectl cluster-info --context kind-airflow-python-3.9-v1.24.2 + kubectl cluster-info --context kind-airflow-python-3.10-v1.24.2 Not sure what to do next? 😅 Check out https://kind.sigs.k8s.io/docs/user/quick-start/ @@ -270,9 +270,9 @@ Should result in KinD creating the K8S cluster. Connecting to localhost:18150. Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.9 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.10 --kubernetes-version v1.24.2` to (re)deploy airflow - KinD cluster airflow-python-3.9-v1.24.2 created! + KinD cluster airflow-python-3.10-v1.24.2 created! NEXT STEP: You might now configure your cluster by: @@ -287,20 +287,20 @@ Should result in KinD creating the K8S cluster. .. 
code-block:: text - Configuring airflow-python-3.9-v1.24.2 to be ready for Airflow deployment - Deleting K8S namespaces for kind-airflow-python-3.9-v1.24.2 + Configuring airflow-python-3.10-v1.24.2 to be ready for Airflow deployment + Deleting K8S namespaces for kind-airflow-python-3.10-v1.24.2 Error from server (NotFound): namespaces "airflow" not found Error from server (NotFound): namespaces "test-namespace" not found Creating namespaces namespace/airflow created namespace/test-namespace created - Created K8S namespaces for cluster kind-airflow-python-3.9-v1.24.2 + Created K8S namespaces for cluster kind-airflow-python-3.10-v1.24.2 - Deploying test resources for cluster kind-airflow-python-3.9-v1.24.2 + Deploying test resources for cluster kind-airflow-python-3.10-v1.24.2 persistentvolume/test-volume created persistentvolumeclaim/test-volume created service/airflow-webserver-node-port created - Deployed test resources for cluster kind-airflow-python-3.9-v1.24.2 + Deployed test resources for cluster kind-airflow-python-3.10-v1.24.2 NEXT STEP: You might now build your k8s image by: @@ -318,45 +318,45 @@ Should show the status of current KinD cluster. .. 
code-block:: text ======================================================================================================================== - Cluster: airflow-python-3.9-v1.24.2 + Cluster: airflow-python-3.10-v1.24.2 - * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kubeconfig - * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.24.2/.kindconfig.yaml + * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.10-v1.24.2/.kubeconfig + * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.10-v1.24.2/.kindconfig.yaml - Cluster info: airflow-python-3.9-v1.24.2 + Cluster info: airflow-python-3.10-v1.24.2 Kubernetes control plane is running at https://127.0.0.1:48366 CoreDNS is running at https://127.0.0.1:48366/api/v1/namespaces/kube-system/services/kube-dns:dns/proxy To further debug and diagnose cluster problems, use 'kubectl cluster-info dump'. - Storage class for airflow-python-3.9-v1.24.2 + Storage class for airflow-python-3.10-v1.24.2 NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE standard (default) rancher.io/local-path Delete WaitForFirstConsumer false 83s - Running pods for airflow-python-3.9-v1.24.2 + Running pods for airflow-python-3.10-v1.24.2 NAME READY STATUS RESTARTS AGE coredns-6d4b75cb6d-rwp9d 1/1 Running 0 71s coredns-6d4b75cb6d-vqnrc 1/1 Running 0 71s - etcd-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s + etcd-airflow-python-3.10-v1.24.2-control-plane 1/1 Running 0 84s kindnet-ckc8l 1/1 Running 0 69s kindnet-qqt8k 1/1 Running 0 71s - kube-apiserver-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s - kube-controller-manager-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s + kube-apiserver-airflow-python-3.10-v1.24.2-control-plane 1/1 Running 0 84s + kube-controller-manager-airflow-python-3.10-v1.24.2-control-plane 1/1 Running 0 84s kube-proxy-6g7hn 1/1 
Running 0 69s kube-proxy-dwfvp 1/1 Running 0 71s - kube-scheduler-airflow-python-3.9-v1.24.2-control-plane 1/1 Running 0 84s + kube-scheduler-airflow-python-3.10-v1.24.2-control-plane 1/1 Running 0 84s KinD Cluster API server URL: http://localhost:48366 Connecting to localhost:18150. Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.9 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.10 --kubernetes-version v1.24.2` to (re)deploy airflow - Cluster healthy: airflow-python-3.9-v1.24.2 + Cluster healthy: airflow-python-3.10-v1.24.2 5. Build the image base on PROD Airflow image. You need to build the PROD image first (the command will guide you if you did not) either by running the build separately or passing ``--rebuild-base-image`` @@ -374,15 +374,15 @@ Should show the status of current KinD cluster. .. 
code-block:: text - Building the K8S image for Python 3.9 using Airflow base image: ghcr.io/apache/airflow/main/prod/python3.9:latest + Building the K8S image for Python 3.10 using Airflow base image: ghcr.io/apache/airflow/main/prod/python3.10:latest [+] Building 0.1s (8/8) FINISHED => [internal] load build definition from Dockerfile 0.0s => => transferring dockerfile: 301B 0.0s => [internal] load .dockerignore 0.0s => => transferring context: 35B 0.0s - => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.9:latest 0.0s - => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.9:latest 0.0s + => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.10:latest 0.0s + => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.10:latest 0.0s => [internal] load build context 0.0s => => transferring context: 3.00kB 0.0s => CACHED [2/3] COPY airflow/example_dags/ /opt/airflow/dags/ 0.0s @@ -390,7 +390,7 @@ Should show the status of current KinD cluster. => exporting to image 0.0s => => exporting layers 0.0s => => writing image sha256:c0bdd363c549c3b0731b8e8ce34153d081f239ee2b582355b7b3ffd5394c40bb 0.0s - => => naming to ghcr.io/apache/airflow/main/prod/python3.9-kubernetes:latest + => => naming to ghcr.io/apache/airflow/main/prod/python3.10-kubernetes:latest NEXT STEP: You might now upload your k8s image by: @@ -410,9 +410,9 @@ Should show the status of current KinD cluster. Good version of kubectl installed: 1.25.0 in /Users/jarek/IdeaProjects/airflow/kubernetes-tests/.venv/bin Good version of helm installed: 3.9.2 in /Users/jarek/IdeaProjects/airflow/kubernetes-tests/.venv/bin Stable repo is already added - Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.9-kubernetes to cluster airflow-python-3.9-v1.24.2 - Image: "ghcr.io/apache/airflow/main/prod/python3.9-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.9-v1.24.2-worker", loading... 
- Image: "ghcr.io/apache/airflow/main/prod/python3.9-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.9-v1.24.2-control-plane", loading... + Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.10-kubernetes to cluster airflow-python-3.10-v1.24.2 + Image: "ghcr.io/apache/airflow/main/prod/python3.10-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.10-v1.24.2-worker", loading... + Image: "ghcr.io/apache/airflow/main/prod/python3.10-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.10-v1.24.2-control-plane", loading... NEXT STEP: You might now deploy Airflow by: @@ -427,8 +427,8 @@ Should show the status of current KinD cluster. .. code-block:: text - Deploying Airflow for cluster airflow-python-3.9-v1.24.2 - Deploying kind-airflow-python-3.9-v1.24.2 with Airflow Helm Chart. + Deploying Airflow for cluster airflow-python-3.10-v1.24.2 + Deploying kind-airflow-python-3.10-v1.24.2 with Airflow Helm Chart. Copied chart sources to /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart Deploying Airflow from /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart NAME: airflow @@ -470,12 +470,12 @@ Should show the status of current KinD cluster. Information on how to set a static webserver secret key can be found here: https://airflow.apache.org/docs/helm-chart/stable/production-guide.html#api-secret-key - Deployed kind-airflow-python-3.9-v1.24.2 with Airflow Helm Chart. + Deployed kind-airflow-python-3.10-v1.24.2 with Airflow Helm Chart. - Airflow for Python 3.9 and K8S version v1.24.2 has been successfully deployed. + Airflow for Python 3.10 and K8S version v1.24.2 has been successfully deployed. 
- The KinD cluster name: airflow-python-3.9-v1.24.2 - The kubectl cluster name: kind-airflow-python-3.9-v1.24.2. + The KinD cluster name: airflow-python-3.10-v1.24.2 + The kubectl cluster name: kind-airflow-python-3.10-v1.24.2. KinD Cluster API server URL: http://localhost:48366 @@ -519,7 +519,7 @@ The virtualenv required will be created automatically when the scripts are run. Good version of helm installed: 3.16.4 in /Users/jarek/IdeaProjects/airflow/kubernetes-tests/.venv/bin Stable repo is already added - Running tests with kind-airflow-python-3.9-v1.29.12 cluster. + Running tests with kind-airflow-python-3.10-v1.29.12 cluster. Command to run: uv run pytest kubernetes-tests/tests/ Installed 74 packages in 179ms /Users/jarek/IdeaProjects/airflow/.venv/lib/python3.12/site-packages/pytest_asyncio/plugin.py:208: PytestDeprecationWarning: The configuration option "asyncio_default_fixture_loop_scope" is unset. @@ -553,7 +553,7 @@ Once you enter the environment, you receive this information: Entering interactive k8s shell. - (kind-airflow-python-3.9-v1.24.2:KubernetesExecutor)> + (kind-airflow-python-3.10-v1.24.2:KubernetesExecutor)> In a separate terminal you can open the k9s CLI: @@ -603,7 +603,7 @@ environment variable copying it from the result of "breeze k8s tests": echo ${KUBECONFIG} - /home/jarek/code/airflow/.build/.k8s-clusters/airflow-python-3.9-v1.28.13/.kube/config + /home/jarek/code/airflow/.build/.k8s-clusters/airflow-python-3.10-v1.28.13/.kube/config .. image:: images/kubeconfig-env.png :align: center @@ -671,9 +671,9 @@ Kind has also useful commands to inspect your running cluster: .. code-block:: text - Deleting KinD cluster airflow-python-3.9-v1.24.2! - Deleting cluster "airflow-python-3.9-v1.24.2" ... - KinD cluster airflow-python-3.9-v1.24.2 deleted! + Deleting KinD cluster airflow-python-3.10-v1.24.2! + Deleting cluster "airflow-python-3.10-v1.24.2" ... + KinD cluster airflow-python-3.10-v1.24.2 deleted! 
Running complete k8s tests diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst index 698096f702990..c15776a963214 100644 --- a/contributing-docs/testing/unit_tests.rst +++ b/contributing-docs/testing/unit_tests.rst @@ -185,7 +185,7 @@ rerun in Breeze as you will (``-n auto`` will parallelize tests using ``pytest-x .. code-block:: bash - breeze shell --backend none --python 3.9 + breeze shell --backend none --python 3.10 > pytest airflow-core/tests --skip-db-tests -n auto @@ -227,7 +227,7 @@ You can also run DB tests with ``breeze`` dockerized environment. You can choose ``--backend`` flag. The default is ``sqlite`` but you can also use others such as ``postgres`` or ``mysql``. You can also select backend version and Python version to use. You can specify the ``test-type`` to run - breeze will list the test types you can run with ``--help`` and provide auto-complete for them. Example -below runs the ``Core`` tests with ``postgres`` backend and ``3.9`` Python version +below runs the ``Core`` tests with ``postgres`` backend and ``3.10`` Python version You can also run the commands via ``breeze testing core-tests`` or ``breeze testing providers-tests`` - by adding the parallel flags manually: @@ -249,7 +249,7 @@ either by package/module/test or by test type - whatever ``pytest`` supports. .. code-block:: bash - breeze shell --backend postgres --python 3.9 + breeze shell --backend postgres --python 3.10 > pytest airflow-core/tests --run-db-tests-only As explained before, you cannot run DB tests in parallel using ``pytest-xdist`` plugin, but ``breeze`` has @@ -258,7 +258,7 @@ and you can run the tests using ``--run-in-parallel`` flag. .. code-block:: bash - breeze testing core-tests --run-db-tests-only --backend postgres --python 3.9 --run-in-parallel + breeze testing core-tests --run-db-tests-only --backend postgres --python 3.10 --run-in-parallel Examples of marking test as DB test ................................... 
@@ -1168,7 +1168,7 @@ Herr id how to reproduce it. .. code-block:: bash - breeze ci-image build --python 3.9 + breeze ci-image build --python 3.10 2. Build providers from latest sources: diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index ba1066d02d12a..0fafbc345a7e9 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -789,7 +789,7 @@ Optionally it can be followed with constraints ```shell script pip install apache-airflow==rc \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-/constraints-3.10.txt" ``` Note that the constraints contain python version that you are installing it with. @@ -801,7 +801,7 @@ There is also an easy way of installation with Breeze if you have the latest sou Running the following command will use tmux inside breeze, create `admin` user and run Webserver & Scheduler: ```shell script -breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.9 --backend postgres +breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.10 --backend postgres ``` You can also choose different executors and extras to install when you are installing airflow this way. 
For @@ -809,7 +809,7 @@ example in order to run Airflow with CeleryExecutor and install celery, google a Airflow 2.7.0, you need to have celery provider installed to run Airflow with CeleryExecutor) you can run: ```shell script -breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.9 --backend postgres \ +breeze start-airflow --use-airflow-version 2.7.0rc1 --python 3.10 --backend postgres \ --executor CeleryExecutor --airflow-extras "celery,google,amazon" ``` diff --git a/dev/README_RELEASE_PROVIDERS.md b/dev/README_RELEASE_PROVIDERS.md index f0a45a560af28..089659c91751b 100644 --- a/dev/README_RELEASE_PROVIDERS.md +++ b/dev/README_RELEASE_PROVIDERS.md @@ -1002,7 +1002,7 @@ pip install apache-airflow-providers-==rc ### Installing with Breeze ```shell -breeze start-airflow --use-airflow-version 2.2.4 --python 3.9 --backend postgres \ +breeze start-airflow --use-airflow-version 2.2.4 --python 3.10 --backend postgres \ --load-example-dags --load-default-connections ``` diff --git a/dev/breeze/README.md b/dev/breeze/README.md index d1c689d1791c3..f89c5537f10c9 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -135,6 +135,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. 
--------------------------------------------------------------------------------------------------------- -Package config hash: 098162a9dca075feeada933f58455aa74f58811a34959f06bd3298aef9773a60f3800e9d5d00b9335d300572842a2838f06bd83f7a79a5ce5f529fb6202059af +Package config hash: aff0246d8f21f59dc1299a6d8298e166ece38be97ee62f0fcb8641e59d991f7e8d649cfcd7d709b20c75d251761e1b3fd18dabe9d2ebc9f45ea58f7379fa125c --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/doc/01_installation.rst b/dev/breeze/doc/01_installation.rst index 9b54ed17d7542..5f0acd5e080f8 100644 --- a/dev/breeze/doc/01_installation.rst +++ b/dev/breeze/doc/01_installation.rst @@ -332,7 +332,7 @@ that Breeze works on .. warning:: Upgrading from earlier Python version - If you used Breeze with Python 3.8 and when running it, it will complain that it needs Python 3.9. In this + If you used Breeze with Python 3.8 and when running it, it will complain that it needs Python 3.10. In this case you should force-reinstall Breeze with ``uv`` (or ``pipx``): .. code-block:: bash @@ -373,13 +373,13 @@ that Breeze works on .. code-block:: bash - uv tool install --python 3.9.16 ./dev/breeze --force + uv tool install --python 3.10.16 ./dev/breeze --force or .. code-block:: bash - pipx install -e ./dev/breeze --python /Users/airflow/.pyenv/versions/3.9.16/bin/python --force + pipx install -e ./dev/breeze --python /Users/airflow/.pyenv/versions/3.10.16/bin/python --force Running Breeze for the first time diff --git a/dev/breeze/doc/03_developer_tasks.rst b/dev/breeze/doc/03_developer_tasks.rst index 3a425469283de..d269e98a0c840 100644 --- a/dev/breeze/doc/03_developer_tasks.rst +++ b/dev/breeze/doc/03_developer_tasks.rst @@ -34,12 +34,12 @@ You can use additional ``breeze`` flags to choose your environment. You can spec version to use, and backend (the meta-data database). 
Thanks to that, with Breeze, you can recreate the same environments as we have in matrix builds in the CI. See next chapter for backend selection. -For example, you can choose to run Python 3.9 tests with MySQL as backend and with mysql version 8 +For example, you can choose to run Python 3.10 tests with MySQL as backend and with mysql version 8 as follows: .. code-block:: bash - breeze --python 3.9 --backend mysql --mysql-version 8.0 + breeze --python 3.10 --backend mysql --mysql-version 8.0 .. note:: Note for Windows WSL2 users @@ -55,7 +55,7 @@ Try adding ``--builder=default`` to your command. For example: .. code-block:: bash - breeze --builder=default --python 3.9 --backend mysql --mysql-version 8.0 + breeze --builder=default --python 3.10 --backend mysql --mysql-version 8.0 The choices you make are persisted in the ``./.build/`` cache directory so that next time when you use the ``breeze`` script, it could use the values that were used previously. This way you do not have to specify @@ -363,7 +363,7 @@ When you are starting Airflow from local sources, www asset compilation is autom .. code-block:: bash - breeze --python 3.9 --backend mysql start-airflow + breeze --python 3.10 --backend mysql start-airflow You can also use it to start different executor. @@ -376,7 +376,7 @@ You can also use it to start any released version of Airflow from ``PyPI`` with .. 
code-block:: bash - breeze start-airflow --python 3.9 --backend mysql --use-airflow-version 2.7.0 + breeze start-airflow --python 3.10 --backend mysql --use-airflow-version 2.7.0 When you are installing version from PyPI, it's also possible to specify extras that should be used when installing Airflow - you can provide several extras separated by coma - for example to install diff --git a/dev/breeze/doc/04_troubleshooting.rst b/dev/breeze/doc/04_troubleshooting.rst index f9828ef1b700e..9f7a409b01f95 100644 --- a/dev/breeze/doc/04_troubleshooting.rst +++ b/dev/breeze/doc/04_troubleshooting.rst @@ -83,7 +83,7 @@ describe your problem. stated in `This comment `_ and allows to run Breeze with no problems. -Cannot import name 'cache' or Python >=3.9 required +Cannot import name 'cache' or Python >=3.10 required --------------------------------------------------- When you see this error: @@ -96,7 +96,7 @@ or .. code-block:: - ERROR: Package 'blacken-docs' requires a different Python: 3.8.18 not in '>=3.9' + ERROR: Package 'blacken-docs' requires a different Python: 3.8.18 not in '>=3.10' It means that your pre-commit hook is installed with (already End-Of-Life) Python 3.8 and you should reinstall @@ -107,7 +107,7 @@ This can be done with ``uv tool`` to install ``pre-commit``) .. code-block:: bash uv tool uninstall pre-commit - uv tool install pre-commit --python 3.9 --force --with pre-commit-uv + uv tool install pre-commit --python 3.10 --force --with pre-commit-uv pre-commit clean pre-commit install @@ -116,7 +116,7 @@ You can also use ``pipx`` .. 
code-block:: bash pipx uninstall pre-commit - pipx install pre-commit --python $(which python3.9) --force + pipx install pre-commit --python $(which python3.10) --force # This one allows pre-commit to use uv for venvs installed by pre-commit pipx inject pre-commit pre-commit-uv # optionally if you want to use uv to install virtualenvs pre-commit clean diff --git a/dev/breeze/doc/05_test_commands.rst b/dev/breeze/doc/05_test_commands.rst index a51013e171a25..1253790707b49 100644 --- a/dev/breeze/doc/05_test_commands.rst +++ b/dev/breeze/doc/05_test_commands.rst @@ -570,7 +570,7 @@ as executor you use, similar to: .. code-block:: bash - (kind-airflow-python-3.9-v1.24.0:KubernetesExecutor)> + (kind-airflow-python-3.10-v1.24.0:KubernetesExecutor)> The shell automatically activates the virtual environment that has all appropriate dependencies @@ -579,7 +579,7 @@ be created and Airflow deployed to it before running the tests): .. code-block:: bash - (kind-airflow-python-3.9-v1.24.0:KubernetesExecutor)> pytest test_kubernetes_executor.py + (kind-airflow-python-3.10-v1.24.0:KubernetesExecutor)> pytest test_kubernetes_executor.py ================================================= test session starts ================================================= platform linux -- Python 3.10.6, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 -- /home/jarek/code/airflow/kubernetes-tests/.venv/bin/python cachedir: .pytest_cache @@ -598,7 +598,7 @@ be created and Airflow deployed to it before running the tests): -- Docs: https://docs.pytest.org/en/stable/warnings.html ============================================ 2 passed, 1 warning in 38.62s ============================================ - (kind-airflow-python-3.9-v1.24.0:KubernetesExecutor)> + (kind-airflow-python-3.10-v1.24.0:KubernetesExecutor)> All parameters of the command are here: diff --git a/dev/breeze/doc/06_managing_docker_images.rst b/dev/breeze/doc/06_managing_docker_images.rst index d743a7ef7bc17..d30e73728c8b2 100644 --- 
a/dev/breeze/doc/06_managing_docker_images.rst +++ b/dev/breeze/doc/06_managing_docker_images.rst @@ -120,13 +120,13 @@ To load the image from specific PR, you can use the following command: .. code-block:: bash - breeze ci-image load --from-pr 12345 --python 3.9 --github-token + breeze ci-image load --from-pr 12345 --python 3.10 --github-token To load the image from specific job run (for example 12538475388), you can use the following command, find the run id from github action runs. .. code-block:: bash - breeze ci-image load --from-run 12538475388 --python 3.9 --github-token + breeze ci-image load --from-run 12538475388 --python 3.10 --github-token After you load the image, you can reproduce the very exact environment that was used in the CI run by entering breeze container without mounting your local sources: @@ -220,10 +220,10 @@ suffix and they need to also be paired with corresponding runtime dependency add .. code-block:: bash - breeze prod-image build --python 3.9 --additional-dev-deps "libasound2-dev" \ + breeze prod-image build --python 3.10 --additional-dev-deps "libasound2-dev" \ --additional-runtime-apt-deps "libasound2" -Same as above but uses python 3.9. +Same as above but uses python 3.10. Building PROD image ................... diff --git a/dev/breeze/doc/10_advanced_breeze_topics.rst b/dev/breeze/doc/10_advanced_breeze_topics.rst index 3cd22855db2fe..d3f393c2b3963 100644 --- a/dev/breeze/doc/10_advanced_breeze_topics.rst +++ b/dev/breeze/doc/10_advanced_breeze_topics.rst @@ -49,7 +49,7 @@ make sure to follow these steps: this will bypass the check we run in Breeze to see if there are new requirements to install for it See example configuration for PyCharm which has run/debug configuration for -``breeze sbom generate-providers-requirements --provider-id sqlite --python 3.9`` +``breeze sbom generate-providers-requirements --provider-id sqlite --python 3.10`` .. 
raw:: html diff --git a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md index 87bbe46a6486e..79401b85f95b5 100644 --- a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md +++ b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md @@ -138,7 +138,7 @@ There are a few properties of Breeze/CI scripts that should be maintained though run a command and get everything done with the least number of prerequisites * The prerequisites for Breeze and CI are: - * Python 3.9+ (Python 3.9 end of life is October 2025) + * Python 3.10+ (Python 3.10 end of life is October 2026) * Docker (23.0+) * Docker Compose (2.16.0+) * No other tools and CLI commands should be needed diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md index 00007ded26e71..4dbfdb6e19637 100644 --- a/dev/breeze/doc/ci/02_images.md +++ b/dev/breeze/doc/ci/02_images.md @@ -129,17 +129,17 @@ The images are built with default extras - different extras for CI and production image and you can change the extras via the `--airflow-extras` parameters and add new ones with `--additional-airflow-extras`.
-For example if you want to build Python 3.9 version of production image +For example if you want to build Python 3.10 version of production image with "all" extras installed you should run this command: ``` bash -breeze prod-image build --python 3.9 --airflow-extras "all" +breeze prod-image build --python 3.10 --airflow-extras "all" ``` If you just want to add new extras you can add them like that: ``` bash -breeze prod-image build --python 3.9 --additional-airflow-extras "all" +breeze prod-image build --python 3.10 --additional-airflow-extras "all" ``` The command that builds the CI image is optimized to minimize the time @@ -160,7 +160,7 @@ You can also build production images from PIP packages via providing `--install-airflow-version` parameter to Breeze: ``` bash -breeze prod-image build --python 3.9 --additional-airflow-extras=trino --install-airflow-version=2.0.0 +breeze prod-image build --python 3.10 --additional-airflow-extras=trino --install-airflow-version=2.0.0 ``` This will build the image using command similar to: @@ -168,7 +168,7 @@ This will build the image using command similar to: ``` bash pip install \ apache-airflow[async,amazon,celery,cncf.kubernetes,docker,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv]==2.0.0 \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.10.txt" ``` > [!NOTE] @@ -199,7 +199,7 @@ HEAD of development for constraints): ``` bash pip install "https://github.com/apache/airflow/archive/.tar.gz#egg=apache-airflow" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.10.txt" ``` You can also skip installing airflow and install it from locally @@ -207,7 
+207,7 @@ provided files by using `--install-distributions-from-context` parameter to Breeze: ``` bash -breeze prod-image build --python 3.9 --additional-airflow-extras=trino --install-distributions-from-context +breeze prod-image build --python 3.10 --additional-airflow-extras=trino --install-distributions-from-context ``` In this case you airflow and all packages (.whl files) should be placed @@ -243,20 +243,20 @@ flags: `registry` (default), `local`, or `disabled` flags when you run Breeze commands. For example: ``` bash -breeze ci-image build --python 3.9 --docker-cache local +breeze ci-image build --python 3.10 --docker-cache local ``` Will build the CI image using local build cache (note that it will take quite a long time the first time you run it). ``` bash -breeze prod-image build --python 3.9 --docker-cache registry +breeze prod-image build --python 3.10 --docker-cache registry ``` Will build the production image with cache used from registry. ``` bash -breeze prod-image build --python 3.9 --docker-cache disabled +breeze prod-image build --python 3.10 --docker-cache disabled ``` Will build the production image from the scratch. @@ -359,7 +359,7 @@ you can build the image in the Here just a few examples are presented which should give you general understanding of what you can customize. -This builds the production image in version 3.9 with additional airflow +This builds the production image in version 3.10 with additional airflow extras from 2.0.0 PyPI package and additional apt dev and runtime dependencies. @@ -371,7 +371,7 @@ plugin installed. ``` bash DOCKER_BUILDKIT=1 docker build . 
-f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \ @@ -382,7 +382,7 @@ the same image can be built using `breeze` (it supports auto-completion of the options): ``` bash -breeze ci-image build --python 3.9 --additional-airflow-extras=jdbc --additional-python-deps="pandas" \ +breeze ci-image build --python 3.10 --additional-airflow-extras=jdbc --additional-python-deps="pandas" \ --additional-dev-apt-deps="gcc g++" ``` @@ -396,7 +396,7 @@ comment](https://github.com/apache/airflow/issues/8605#issuecomment-690065621): ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack" \ --build-arg ADDITIONAL_PYTHON_DEPS="apache-airflow-providers-odbc \ @@ -419,93 +419,93 @@ DOCKER_BUILDKIT=1 docker build . 
-f Dockerfile.ci \ The following build arguments (`--build-arg` in docker build command) can be used for CI images: -| Build argument | Default value | Description | -|-----------------------------------|----------------------------|-------------------------------------------------------------------------------------------------------------------| -| `PYTHON_BASE_IMAGE` | `python:3.9-slim-bookworm` | Base Python image | -| `PYTHON_MAJOR_MINOR_VERSION` | `3.9` | major/minor version of Python (should match base image) | -| `DEPENDENCIES_EPOCH_NUMBER` | `2` | increasing this number will reinstall all apt dependencies | -| `ADDITIONAL_PIP_INSTALL_FLAGS` | | additional `pip` flags passed to the installation commands (except when reinstalling `pip` itself) | -| `HOME` | `/root` | Home directory of the root user (CI image has root user as default) | -| `AIRFLOW_HOME` | `/root/airflow` | Airflow's HOME (that's where logs and sqlite databases are stored) | -| `AIRFLOW_SOURCES` | `/opt/airflow` | Mounted sources of Airflow | -| `AIRFLOW_REPO` | `apache/airflow` | the repository from which PIP dependencies are pre-installed | -| `AIRFLOW_BRANCH` | `main` | the branch from which PIP dependencies are pre-installed | -| `AIRFLOW_CI_BUILD_EPOCH` | `1` | increasing this value will reinstall PIP dependencies from the repository from scratch | -| `AIRFLOW_CONSTRAINTS_LOCATION` | | If not empty, it will override the source of the constraints with the specified URL or file. | -| `AIRFLOW_CONSTRAINTS_REFERENCE` | `constraints-main` | reference (branch or tag) from GitHub repository from which constraints are used. | -| `AIRFLOW_EXTRAS` | `all` | extras to install | -| `UPGRADE_RANDOM_INDICATOR_STRING` | | If set to any random value the dependencies are upgraded to newer versions. In CI it is set to build id. 
| -| `ADDITIONAL_AIRFLOW_EXTRAS` | | additional extras to install | -| `ADDITIONAL_PYTHON_DEPS` | | additional Python dependencies to install | -| `DEV_APT_COMMAND` | | Dev apt command executed before dev deps are installed in the first part of image | -| `ADDITIONAL_DEV_APT_COMMAND` | | Additional Dev apt command executed before dev dep are installed in the first part of the image | -| `DEV_APT_DEPS` | | Dev APT dependencies installed in the first part of the image (default empty means default dependencies are used) | -| `ADDITIONAL_DEV_APT_DEPS` | | Additional apt dev dependencies installed in the first part of the image | -| `ADDITIONAL_DEV_APT_ENV` | | Additional env variables defined when installing dev deps | -| `AIRFLOW_PIP_VERSION` | `25.1.1` | `pip` version used. | -| `AIRFLOW_UV_VERSION` | `0.7.16` | `uv` version used. | -| `AIRFLOW_PRE_COMMIT_VERSION` | `4.2.0` | `pre-commit` version used. | -| `AIRFLOW_PRE_COMMIT_UV_VERSION` | `4.1.4` | `pre-commit-uv` version used. | -| `AIRFLOW_USE_UV` | `true` | Whether to use UV for installation. 
| -| `PIP_PROGRESS_BAR` | `on` | Progress bar for PIP installation | +| Build argument | Default value | Description | +|-----------------------------------|-----------------------------|-------------------------------------------------------------------------------------------------------------------| +| `PYTHON_BASE_IMAGE` | `python:3.10-slim-bookworm` | Base Python image | +| `PYTHON_MAJOR_MINOR_VERSION` | `3.10` | major/minor version of Python (should match base image) | +| `DEPENDENCIES_EPOCH_NUMBER` | `2` | increasing this number will reinstall all apt dependencies | +| `ADDITIONAL_PIP_INSTALL_FLAGS` | | additional `pip` flags passed to the installation commands (except when reinstalling `pip` itself) | +| `HOME` | `/root` | Home directory of the root user (CI image has root user as default) | +| `AIRFLOW_HOME` | `/root/airflow` | Airflow's HOME (that's where logs and sqlite databases are stored) | +| `AIRFLOW_SOURCES` | `/opt/airflow` | Mounted sources of Airflow | +| `AIRFLOW_REPO` | `apache/airflow` | the repository from which PIP dependencies are pre-installed | +| `AIRFLOW_BRANCH` | `main` | the branch from which PIP dependencies are pre-installed | +| `AIRFLOW_CI_BUILD_EPOCH` | `1` | increasing this value will reinstall PIP dependencies from the repository from scratch | +| `AIRFLOW_CONSTRAINTS_LOCATION` | | If not empty, it will override the source of the constraints with the specified URL or file. | +| `AIRFLOW_CONSTRAINTS_REFERENCE` | `constraints-main` | reference (branch or tag) from GitHub repository from which constraints are used. | +| `AIRFLOW_EXTRAS` | `all` | extras to install | +| `UPGRADE_RANDOM_INDICATOR_STRING` | | If set to any random value the dependencies are upgraded to newer versions. In CI it is set to build id. 
| +| `ADDITIONAL_AIRFLOW_EXTRAS` | | additional extras to install | +| `ADDITIONAL_PYTHON_DEPS` | | additional Python dependencies to install | +| `DEV_APT_COMMAND` | | Dev apt command executed before dev deps are installed in the first part of image | +| `ADDITIONAL_DEV_APT_COMMAND` | | Additional Dev apt command executed before dev dep are installed in the first part of the image | +| `DEV_APT_DEPS` | | Dev APT dependencies installed in the first part of the image (default empty means default dependencies are used) | +| `ADDITIONAL_DEV_APT_DEPS` | | Additional apt dev dependencies installed in the first part of the image | +| `ADDITIONAL_DEV_APT_ENV` | | Additional env variables defined when installing dev deps | +| `AIRFLOW_PIP_VERSION` | `25.1.1` | `pip` version used. | +| `AIRFLOW_UV_VERSION` | `0.7.16` | `uv` version used. | +| `AIRFLOW_PRE_COMMIT_VERSION` | `4.2.0` | `pre-commit` version used. | +| `AIRFLOW_PRE_COMMIT_UV_VERSION` | `4.1.4` | `pre-commit-uv` version used. | +| `AIRFLOW_USE_UV` | `true` | Whether to use UV for installation. | +| `PIP_PROGRESS_BAR` | `on` | Progress bar for PIP installation | Here are some examples of how CI images can built manually. CI is always built from local sources. -This builds the CI image in version 3.9 with default extras ("all"). +This builds the CI image in version 3.10 with default extras ("all"). ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" --tag my-image:0.0.1 + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.9 with "gcp" extra only. +This builds the CI image in version 3.10 with "gcp" extra only. ``` bash DOCKER_BUILDKIT=1 docker build . 
-f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_EXTRAS=gcp --tag my-image:0.0.1 ``` -This builds the CI image in version 3.9 with "apache-beam" extra added. +This builds the CI image in version 3.10 with "apache-beam" extra added. ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="apache-beam" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.9 with "mssql" additional package +This builds the CI image in version 3.10 with "mssql" additional package added. ``` bash DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg ADDITIONAL_PYTHON_DEPS="mssql" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.9 with "gcc" and "g++" additional +This builds the CI image in version 3.10 with "gcc" and "g++" additional apt dev dependencies added. ``` DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" --tag my-image:0.0.1 ``` -This builds the CI image in version 3.9 with "jdbc" extra and +This builds the CI image in version 3.10 with "jdbc" extra and "default-jre-headless" additional apt runtime dependencies added. ``` DOCKER_BUILDKIT=1 docker build . 
-f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_EXTRAS=jdbc \ --tag my-image:0.0.1 ``` @@ -567,7 +567,7 @@ percent-encoded when you access them via UI (/ = %2F) | PROD image | airflow/\/prod/python\ | faster to build or pull. Production image optimized for size. | - \ might be either "main" or "v2-\*-test" -- \ - Python version (Major + Minor).Should be one of \["3.9", "3.10", "3.11", "3.12" \]. +- \ - Python version (Major + Minor).Should be one of \["3.10", "3.11", "3.12" \]. ---- diff --git a/dev/breeze/doc/ci/04_selective_checks.md b/dev/breeze/doc/ci/04_selective_checks.md index 615bee819b782..15a60d24670d0 100644 --- a/dev/breeze/doc/ci/04_selective_checks.md +++ b/dev/breeze/doc/ci/04_selective_checks.md @@ -164,79 +164,79 @@ separated by spaces. This is to accommodate for the wau how outputs of this kind GitHub Actions to pass the list of parameters to a command to execute -| Output | Meaning of the output | Example value | List | -|---------------------------------------------------------|--------------------------------------------------------------------------------------------------------|-----------------------------------------|------| -| all-python-versions | List of all python versions there are available in the form of JSON array | \['3.9', '3.10'\] | | -| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.9 3.10 | * | -| all-versions | If set to true, then all python, k8s, DB versions are used for tests. | false | | -| basic-checks-only | Whether to run all static checks ("false") or only basic set of static checks ("true") | false | | -| build_system_changed_in_pyproject_toml | When builds system dependencies changed in pyproject.toml changed in the PR. 
| false | | -| ci-image-build | Whether CI image build is needed | true | | -| core-test-types-list-as-strings-in-json | Which test types should be run for unit tests for core | API Always Providers | * | -| debug-resources | Whether resources usage should be printed during parallel job execution ("true"/ "false") | false | | -| default-branch | Which branch is default for the build ("main" for main branch, "v2-4-test" for 2.4 line etc.) | main | | -| default-constraints-branch | Which branch is default for the build ("constraints-main" for main branch, "constraints-2-4" etc.) | constraints-main | | -| default-helm-version | Which Helm version to use as default | v3.9.4 | | -| default-kind-version | Which Kind version to use as default | v0.16.0 | | -| default-kubernetes-version | Which Kubernetes version to use as default | v1.25.2 | | -| default-mysql-version | Which MySQL version to use as default | 5.7 | | -| default-postgres-version | Which Postgres version to use as default | 10 | | -| default-python-version | Which Python version to use as default | 3.9 | | -| disable-airflow-repo-cache | Disables cache of the repo main cache in CI - aiflow will be installed without main installation cache | true | | -| docker-cache | Which cache should be used for images ("registry", "local" , "disabled") | registry | | -| docs-build | Whether to build documentation ("true"/"false") | true | | -| docs-list-as-string | What filter to apply to docs building - based on which documentation packages should be built | apache-airflow helm-chart google | * | -| excluded-providers-as-string c | List of providers that should be excluded from the build as space-separated string | amazon google | * | -| force-pip | Whether pip should be forced in the image build instead of uv ("true"/"false") | false | | -| full-tests-needed | Whether this build runs complete set of tests or only subset (for faster PR builds) \[1\] | false | | -| generated-dependencies-changed | Whether generated 
dependencies have changed ("true"/"false") | false | | -| has-migrations | Whether the PR has migrations ("true"/"false") | false | | -| hatch-build-changed | When hatch build.py changed in the PR. | false | | -| helm-test-packages-list-as-string | List of helm packages to test as JSON array | \["airflow_aux", "airflow_core"\] | * | -| helm-version | Which Helm version to use for tests | v3.15.3 | | -| include-success-outputs | Whether to include outputs of successful parallel tests ("true"/"false") | false | | -| individual-providers-test-types-list-as-strings-in-json | Which test types should be run for unit tests for providers (individually listed) | Providers[\amazon\] Providers\[google\] | * | -| is-committer-build | Whether the build is triggered by a committer | false | | -| is-legacy-ui-api-labeled | Whether the PR is labeled as legacy UI/API | false | | -| kind-version | Which Kind version to use for tests | v0.24.0 | | -| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.9-v1.25.2 3.10-v1.28.13 | * | -| kubernetes-versions | All Kubernetes versions to use for tests as JSON array | \['v1.25.2'\] | | -| kubernetes-versions-list-as-string | All Kubernetes versions to use for tests as space-separated string | v1.25.2 | * | -| latest-versions-only | If set, the number of Python, Kubernetes, DB versions will be limited to the latest ones. 
| false | | -| mypy-checks | List of folders to be considered for mypy checks | \["airflow_aux", "airflow_core"\] | | -| mysql-exclude | Which versions of MySQL to exclude for tests as JSON array | [] | | -| mysql-versions | Which versions of MySQL to use for tests as JSON array | \['8.0'\] | | -| needs-api-codegen | Whether "api-codegen" are needed to run ("true"/"false") | true | | -| needs-api-tests | Whether "api-tests" are needed to run ("true"/"false") | true | | -| needs-helm-tests | Whether Helm tests are needed to run ("true"/"false") | true | | -| needs-javascript-scans | Whether javascript CodeQL scans should be run ("true"/"false") | true | | -| needs-mypy | Whether mypy check is supposed to run in this build | true | | -| needs-python-scans | Whether Python CodeQL scans should be run ("true"/"false") | true | | -| only-new-ui-files | Whether only new UI files are present in the PR ("true"/"false") | false | | -| postgres-exclude | Which versions of Postgres to exclude for tests as JSON array | [] | | -| postgres-versions | Which versions of Postgres to use for tests as JSON array | \['12'\] | | -| prod-image-build | Whether PROD image build is needed | true | | -| providers-compatibility-tests-matrix | Matrix of providers compatibility tests: (python_version, airflow_version, removed_providers) | \[{}\] | | -| providers-test-types-list-as-strings-in-json | Which test types should be run for unit tests for providers | Providers Providers\[-google\] | * | -| pyproject-toml-changed | When pyproject.toml changed in the PR. 
| false | | -| python-versions | List of python versions to use for that build | \['3.9'\] | | -| python-versions-list-as-string | Which versions of MySQL to use for tests as space-separated string | 3.9 | * | -| run-amazon-tests | Whether Amazon tests should be run ("true"/"false") | true | | -| run-kubernetes-tests | Whether Kubernetes tests should be run ("true"/"false") | true | | -| run-system-tests | Whether system tests should be run ("true"/"false") | true | | -| run-task-sdk-tests | Whether Task SDK tests should be run ("true"/"false") | true | | -| run-tests | Whether unit tests should be run ("true"/"false") | true | | -| run-ui-tests | Whether UI tests should be run ("true"/"false") | true | | -| run-www-tests | Whether Legacy WWW tests should be run ("true"/"false") | true | | -| amd-runners | List of labels assigned for runners for that build for public AMD runners | \["ubuntu-22.04"\] | | -| arm-runners | List of labels assigned for runners for that build for public ARM runners | \["ubuntu-22.04-arm"\] | | -| selected-providers-list-as-string | List of providers affected when they are selectively affected. 
| airbyte http | * | -| skip-pre-commits | Which pre-commits should be skipped during the static-checks run | flynt,identity | | -| skip-providers-tests | When provider tests should be skipped (on non-main branch or when no provider changes detected) | true | | -| sqlite-exclude | Which versions of Sqlite to exclude for tests as JSON array | [] | | -| testable-core-integrations | List of core integrations that are testable in the build as JSON array | \['celery', 'kerberos'\] | | -| testable-providers-integrations | List of core integrations that are testable in the build as JSON array | \['mongo', 'kafka'\] | | -| upgrade-to-newer-dependencies | Whether the image build should attempt to upgrade all dependencies (true/false or commit hash) | false | | +| Output | Meaning of the output | Example value | List | +|---------------------------------------------------------|--------------------------------------------------------------------------------------------------------|------------------------------------------|------| +| all-python-versions | List of all python versions there are available in the form of JSON array | \['3.10', '3.11'\] | | +| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.10 3.11 | * | +| all-versions | If set to true, then all python, k8s, DB versions are used for tests. | false | | +| basic-checks-only | Whether to run all static checks ("false") or only basic set of static checks ("true") | false | | +| build_system_changed_in_pyproject_toml | When builds system dependencies changed in pyproject.toml changed in the PR. 
| false | | +| ci-image-build                                          | Whether CI image build is needed                                                                        | true                                     | | +| core-test-types-list-as-strings-in-json                 | Which test types should be run for unit tests for core                                                  | API Always Providers                     | * | +| debug-resources                                         | Whether resources usage should be printed during parallel job execution ("true"/ "false")               | false                                    | | +| default-branch                                          | Which branch is default for the build ("main" for main branch, "v2-4-test" for 2.4 line etc.)           | main                                     | | +| default-constraints-branch                              | Which branch is default for the build ("constraints-main" for main branch, "constraints-2-4" etc.)      | constraints-main                         | | +| default-helm-version                                    | Which Helm version to use as default                                                                    | v3.9.4                                   | | +| default-kind-version                                    | Which Kind version to use as default                                                                    | v0.16.0                                  | | +| default-kubernetes-version                              | Which Kubernetes version to use as default                                                              | v1.25.2                                  | | +| default-mysql-version                                   | Which MySQL version to use as default                                                                   | 5.7                                      | | +| default-postgres-version                                | Which Postgres version to use as default                                                                | 10                                       | | +| default-python-version                                  | Which Python version to use as default                                                                  | 3.10                                     | | +| disable-airflow-repo-cache                              | Disables cache of the repo main cache in CI - airflow will be installed without main installation cache | true                                     | | +| docker-cache                                            | Which cache should be used for images ("registry", "local" , "disabled")                                | registry                                 | | +| docs-build                                              | Whether to build documentation ("true"/"false")                                                         | true                                     | | +| docs-list-as-string                                     | What filter to apply to docs building - based on which documentation packages should be built           | apache-airflow helm-chart google         | * | +| excluded-providers-as-string                             | List of providers that should be excluded from the build as space-separated string                      | amazon google                            | * | +| force-pip                                               | Whether pip should be forced in the image build instead of uv ("true"/"false")                          | false                                    | | +| full-tests-needed                                       | Whether this build runs complete set of tests or only subset (for faster PR builds) \[1\]               | false                                    | | +| generated-dependencies-changed                          | Whether generated
dependencies have changed ("true"/"false") | false | | +| has-migrations | Whether the PR has migrations ("true"/"false") | false | | +| hatch-build-changed | When hatch build.py changed in the PR. | false | | +| helm-test-packages-list-as-string | List of helm packages to test as JSON array | \["airflow_aux", "airflow_core"\] | * | +| helm-version | Which Helm version to use for tests | v3.15.3 | | +| include-success-outputs | Whether to include outputs of successful parallel tests ("true"/"false") | false | | +| individual-providers-test-types-list-as-strings-in-json | Which test types should be run for unit tests for providers (individually listed) | Providers[\amazon\] Providers\[google\] | * | +| is-committer-build | Whether the build is triggered by a committer | false | | +| is-legacy-ui-api-labeled | Whether the PR is labeled as legacy UI/API | false | | +| kind-version | Which Kind version to use for tests | v0.24.0 | | +| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.10-v1.25.2 3.11-v1.28.13 | * | +| kubernetes-versions | All Kubernetes versions to use for tests as JSON array | \['v1.25.2'\] | | +| kubernetes-versions-list-as-string | All Kubernetes versions to use for tests as space-separated string | v1.25.2 | * | +| latest-versions-only | If set, the number of Python, Kubernetes, DB versions will be limited to the latest ones. 
| false | | +| mypy-checks | List of folders to be considered for mypy checks | \["airflow_aux", "airflow_core"\] | | +| mysql-exclude | Which versions of MySQL to exclude for tests as JSON array | [] | | +| mysql-versions | Which versions of MySQL to use for tests as JSON array | \['8.0'\] | | +| needs-api-codegen | Whether "api-codegen" are needed to run ("true"/"false") | true | | +| needs-api-tests | Whether "api-tests" are needed to run ("true"/"false") | true | | +| needs-helm-tests | Whether Helm tests are needed to run ("true"/"false") | true | | +| needs-javascript-scans | Whether javascript CodeQL scans should be run ("true"/"false") | true | | +| needs-mypy | Whether mypy check is supposed to run in this build | true | | +| needs-python-scans | Whether Python CodeQL scans should be run ("true"/"false") | true | | +| only-new-ui-files | Whether only new UI files are present in the PR ("true"/"false") | false | | +| postgres-exclude | Which versions of Postgres to exclude for tests as JSON array | [] | | +| postgres-versions | Which versions of Postgres to use for tests as JSON array | \['12'\] | | +| prod-image-build | Whether PROD image build is needed | true | | +| providers-compatibility-tests-matrix | Matrix of providers compatibility tests: (python_version, airflow_version, removed_providers) | \[{}\] | | +| providers-test-types-list-as-strings-in-json | Which test types should be run for unit tests for providers | Providers Providers\[-google\] | * | +| pyproject-toml-changed | When pyproject.toml changed in the PR. 
| false | | +| python-versions                                         | List of python versions to use for that build                                                           | \['3.10'\]                               | | +| python-versions-list-as-string                          | Which versions of Python to use for tests as space-separated string                                     | 3.10                                     | * | +| run-amazon-tests                                        | Whether Amazon tests should be run ("true"/"false")                                                     | true                                     | | +| run-kubernetes-tests                                    | Whether Kubernetes tests should be run ("true"/"false")                                                 | true                                     | | +| run-system-tests                                        | Whether system tests should be run ("true"/"false")                                                     | true                                     | | +| run-task-sdk-tests                                      | Whether Task SDK tests should be run ("true"/"false")                                                   | true                                     | | +| run-tests                                               | Whether unit tests should be run ("true"/"false")                                                       | true                                     | | +| run-ui-tests                                            | Whether UI tests should be run ("true"/"false")                                                         | true                                     | | +| run-www-tests                                           | Whether Legacy WWW tests should be run ("true"/"false")                                                 | true                                     | | +| amd-runners                                             | List of labels assigned for runners for that build for public AMD runners                               | \["ubuntu-22.04"\]                       | | +| arm-runners                                             | List of labels assigned for runners for that build for public ARM runners                               | \["ubuntu-22.04-arm"\]                   | | +| selected-providers-list-as-string                       | List of providers affected when they are selectively affected.                                          | airbyte http                             | * | +| skip-pre-commits                                        | Which pre-commits should be skipped during the static-checks run                                        | flynt,identity                           | | +| skip-providers-tests                                    | When provider tests should be skipped (on non-main branch or when no provider changes detected)         | true                                     | | +| sqlite-exclude                                          | Which versions of Sqlite to exclude for tests as JSON array                                             | []                                       | | +| testable-core-integrations                              | List of core integrations that are testable in the build as JSON array                                  | \['celery', 'kerberos'\]                 | | +| testable-providers-integrations                         | List of provider integrations that are testable in the build as JSON array                              | \['mongo', 'kafka'\]                     | | +| upgrade-to-newer-dependencies                           | Whether the image build should attempt to upgrade all dependencies (true/false or commit hash)          | false                                    | | [1] Note for deciding if `full tests needed` mode is enabled and provider.yaml files.
diff --git a/dev/breeze/doc/ci/07_running_ci_locally.md b/dev/breeze/doc/ci/07_running_ci_locally.md index 6724008b8cc99..d71cddac74adc 100644 --- a/dev/breeze/doc/ci/07_running_ci_locally.md +++ b/dev/breeze/doc/ci/07_running_ci_locally.md @@ -76,14 +76,14 @@ this will change soon. To load the image from specific PR, you can use the following command: ```bash -breeze ci-image load --from-pr 12345 --python 3.9 --github-token +breeze ci-image load --from-pr 12345 --python 3.10 --github-token ``` To load the image from specific run (for example 12538475388), you can use the following command, find the run id from GitHub action runs. ```bash -breeze ci-image load --from-run 12538475388 --python 3.9 --github-token +breeze ci-image load --from-run 12538475388 --python 3.10 --github-token ``` After you load the image, you can reproduce the very exact environment that was used in the CI run by diff --git a/dev/breeze/pyproject.toml b/dev/breeze/pyproject.toml index 722ed77772b44..4d22bbc4e4ef7 100644 --- a/dev/breeze/pyproject.toml +++ b/dev/breeze/pyproject.toml @@ -36,7 +36,6 @@ classifiers = [ "Intended Audience :: Developers", "Framework :: Apache Airflow", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -88,7 +87,7 @@ name = "airflow_breeze" [tool.black] line-length = 110 -target-version = ['py39', 'py310', 'py311', 'py312'] +target-version = ['py310', 'py311', 'py312'] [tool.pytest.ini_options] addopts = "-rasl --verbosity=2 -p no:flaky -p no:nose -p no:legacypath" diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py index 64f652ff3cf0c..8fff8b0693a41 100644 --- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py @@ -194,16 +194,7 @@ 
def kill_process_group(build_process_group_id: int): def get_exitcode(status: int) -> int: - # In Python 3.9+ we will be able to use - # os.waitstatus_to_exitcode(status) - see https://github.com/python/cpython/issues/84275 - # but until then we need to do this ugly conversion - if os.WIFSIGNALED(status): - return -os.WTERMSIG(status) - if os.WIFEXITED(status): - return os.WEXITSTATUS(status) - if os.WIFSTOPPED(status): - return -os.WSTOPSIG(status) - return 1 + return os.waitstatus_to_exitcode(status) option_upgrade_to_newer_dependencies = click.option( @@ -1005,7 +996,7 @@ def import_mount_cache( make_sure_builder_configured(params=BuildCiParams(builder=builder)) dockerfile = """ # syntax=docker/dockerfile:1.4 - FROM python:3.9-slim-bookworm + FROM python:3.10-slim-bookworm ARG TARGETARCH ARG DEPENDENCY_CACHE_EPOCH= COPY cache.tar.gz /root/.cache.tar.gz diff --git a/dev/check_files.py b/dev/check_files.py index 91861422d29e0..b651c06f3ad38 100644 --- a/dev/check_files.py +++ b/dev/check_files.py @@ -31,7 +31,7 @@ from rich import print PROVIDERS_DOCKER = """\ -FROM ghcr.io/apache/airflow/main/ci/python3.9 +FROM ghcr.io/apache/airflow/main/ci/python3.10 RUN cd airflow-core; uv sync --no-sources # Install providers @@ -39,7 +39,7 @@ """ AIRFLOW_DOCKER = """\ -FROM python:3.9 +FROM python:3.10 # Upgrade RUN pip install "apache-airflow=={}" diff --git a/dev/provider_db_inventory.py b/dev/provider_db_inventory.py index 296938891bf21..2464b45c433b3 100755 --- a/dev/provider_db_inventory.py +++ b/dev/provider_db_inventory.py @@ -18,7 +18,7 @@ # PEP 723 compliant inline script metadata # /// script -# requires-python = ">=3.9" +# requires-python = ">=3.10" # dependencies = [ # "rich", # "pyyaml", diff --git a/dev/refresh_images.sh b/dev/refresh_images.sh index 46700cd75ee4e..9f39a51a87f3f 100755 --- a/dev/refresh_images.sh +++ b/dev/refresh_images.sh @@ -47,7 +47,7 @@ done # #mv -v ./dist/*.whl ./docker-context-files && chmod a+r ./docker-context-files/* # -#for PYTHON 
in 3.9 3.10 3.11 3.12 +#for PYTHON in 3.10 3.11 3.12 #do # breeze prod-image build \ # --builder airflow_cache \ diff --git a/docker-stack-docs/build-arg-ref.rst b/docker-stack-docs/build-arg-ref.rst index 776d50d280dd3..0e7125d6504b4 100644 --- a/docker-stack-docs/build-arg-ref.rst +++ b/docker-stack-docs/build-arg-ref.rst @@ -27,56 +27,56 @@ Basic arguments Those are the most common arguments that you use when you want to build a custom image. -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| Build argument | Default value | Description | -+==========================================+==========================================+=============================================+ -| ``PYTHON_BASE_IMAGE`` | ``python:3.9-slim-bookworm`` | Base python image. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_VERSION`` | :subst-code:`|airflow-version|` | version of Airflow. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_EXTRAS`` | (see below the table) | Default extras with which Airflow is | -| | | installed. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``ADDITIONAL_AIRFLOW_EXTRAS`` | | Optional additional extras with which | -| | | Airflow is installed. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_HOME`` | ``/opt/airflow`` | Airflow's HOME (that's where logs and | -| | | SQLite databases are stored). 
| -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_USER_HOME_DIR`` | ``/home/airflow`` | Home directory of the Airflow user. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_PIP_VERSION`` | ```` | PIP version used. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_UV_VERSION`` | ```` | UV version used. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_USE_UV`` | ``false`` | Whether to use UV to build the image. | -| | | This is an experimental feature. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``UV_HTTP_TIMEOUT`` | ``300`` | Timeout in seconds for UV pull requests. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``ADDITIONAL_PIP_INSTALL_FLAGS`` | | additional ``pip`` flags passed to the | -| | | installation commands (except when | -| | | reinstalling ``pip`` itself) | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``PIP_PROGRESS_BAR`` | ``on`` | Progress bar for PIP installation | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_UID`` | ``50000`` | Airflow user UID. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_CONSTRAINTS`` | ``constraints`` | Type of constraints to build the image. 
| -| | | This can be ``constraints`` for regular | -| | | images or ``constraints-no-providers`` for | -| | | slim images. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ -| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | | Reference (branch or tag) from GitHub | -| | | where constraints file is taken from | -| | | It can be ``constraints-main`` or | -| | | ``constraints-2-0`` for | -| | | 2.0.* installation. In case of building | -| | | specific version you want to point it | -| | | to specific tag, for example | -| | | :subst-code:`constraints-|airflow-version|`.| -| | | Auto-detected if empty. | -+------------------------------------------+------------------------------------------+---------------------------------------------+ ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| Build argument | Default value | Description | ++==========================================+===========================================+=============================================+ +| ``PYTHON_BASE_IMAGE`` | ``python:3.10-slim-bookworm`` | Base python image. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_VERSION`` | :subst-code:`|airflow-version|` | version of Airflow. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_EXTRAS`` | (see below the table) | Default extras with which Airflow is | +| | | installed. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``ADDITIONAL_AIRFLOW_EXTRAS`` | | Optional additional extras with which | +| | | Airflow is installed. 
| ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_HOME`` | ``/opt/airflow`` | Airflow's HOME (that's where logs and | +| | | SQLite databases are stored). | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_USER_HOME_DIR`` | ``/home/airflow`` | Home directory of the Airflow user. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_PIP_VERSION`` | ```` | PIP version used. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_UV_VERSION`` | ```` | UV version used. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_USE_UV`` | ``false`` | Whether to use UV to build the image. | +| | | This is an experimental feature. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``UV_HTTP_TIMEOUT`` | ``300`` | Timeout in seconds for UV pull requests. 
| ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``ADDITIONAL_PIP_INSTALL_FLAGS`` | | additional ``pip`` flags passed to the | +| | | installation commands (except when | +| | | reinstalling ``pip`` itself) | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``PIP_PROGRESS_BAR`` | ``on`` | Progress bar for PIP installation | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_UID`` | ``50000`` | Airflow user UID. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_CONSTRAINTS`` | ``constraints`` | Type of constraints to build the image. | +| | | This can be ``constraints`` for regular | +| | | images or ``constraints-no-providers`` for | +| | | slim images. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ +| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | | Reference (branch or tag) from GitHub | +| | | where constraints file is taken from | +| | | It can be ``constraints-main`` or | +| | | ``constraints-2-0`` for | +| | | 2.0.* installation. In case of building | +| | | specific version you want to point it | +| | | to specific tag, for example | +| | | :subst-code:`constraints-|airflow-version|`.| +| | | Auto-detected if empty. | ++------------------------------------------+-------------------------------------------+---------------------------------------------+ .. 
note:: diff --git a/docker-stack-docs/build.rst b/docker-stack-docs/build.rst index 2876cbe319b85..620392b360e03 100644 --- a/docker-stack-docs/build.rst +++ b/docker-stack-docs/build.rst @@ -215,7 +215,7 @@ In the simplest case building your image consists of those steps: 1) Create your own ``Dockerfile`` (name it ``Dockerfile``) where you add: -* information what your image should be based on (for example ``FROM: apache/airflow:|airflow-version|-python3.9`` +* information what your image should be based on (for example ``FROM: apache/airflow:|airflow-version|-python3.10`` * additional steps that should be executed in your image (typically in the form of ``RUN ``) @@ -313,17 +313,17 @@ There are two types of images you can extend your image from: Naming conventions for the images: -+----------------+-----------------------+---------------------------------+--------------------------------------+ -| Image | Python | Standard image | Slim image | -+================+=======================+=================================+======================================+ -| Latest default | 3.12 | apache/airflow:latest | apache/airflow:slim-latest | -+----------------+-----------------------+---------------------------------+--------------------------------------+ -| Default | 3.12 | apache/airflow:X.Y.Z | apache/airflow:slim-X.Y.Z | -+----------------+-----------------------+---------------------------------+--------------------------------------+ -| Latest | 3.9,3.10,3.11,3.12 | apache/airflow:latest-pythonN.M | apache/airflow:slim-latest-pythonN.M | -+----------------+-----------------------+---------------------------------+--------------------------------------+ -| Specific | 3.9,3.10,3.11,3.12 | apache/airflow:X.Y.Z-pythonN.M | apache/airflow:slim-X.Y.Z-pythonN.M | -+----------------+-----------------------+---------------------------------+--------------------------------------+ 
++----------------+-------------------+---------------------------------+--------------------------------------+ +| Image | Python | Standard image | Slim image | ++================+===================+=================================+======================================+ +| Latest default | 3.12 | apache/airflow:latest | apache/airflow:slim-latest | ++----------------+-------------------+---------------------------------+--------------------------------------+ +| Default | 3.12 | apache/airflow:X.Y.Z | apache/airflow:slim-X.Y.Z | ++----------------+-------------------+---------------------------------+--------------------------------------+ +| Latest | 3.10,3.11,3.12 | apache/airflow:latest-pythonN.M | apache/airflow:slim-latest-pythonN.M | ++----------------+-------------------+---------------------------------+--------------------------------------+ +| Specific | 3.10,3.11,3.12 | apache/airflow:X.Y.Z-pythonN.M | apache/airflow:slim-X.Y.Z-pythonN.M | ++----------------+-------------------+---------------------------------+--------------------------------------+ * The "latest" image is always the latest released stable version available. @@ -714,7 +714,7 @@ Building from PyPI packages This is the basic way of building the custom images from sources. -The following example builds the production image in version ``3.9`` with latest PyPI-released Airflow, +The following example builds the production image in version ``3.10`` with latest PyPI-released Airflow, with default set of Airflow extras and dependencies. The latest PyPI-released Airflow constraints are used automatically. .. exampleinclude:: docker-examples/customizing/stable-airflow.sh @@ -722,7 +722,7 @@ with default set of Airflow extras and dependencies. 
The latest PyPI-released Ai :start-after: [START build] :end-before: [END build] -The following example builds the production image in version ``3.9`` with default extras from ``2.3.0`` Airflow +The following example builds the production image in version ``3.10`` with default extras from ``2.3.0`` Airflow package. The ``2.3.0`` constraints are used automatically. .. exampleinclude:: docker-examples/customizing/pypi-selected-version.sh @@ -730,7 +730,7 @@ package. The ``2.3.0`` constraints are used automatically. :start-after: [START build] :end-before: [END build] -The following example builds the production image in version ``3.9`` with additional Airflow extras +The following example builds the production image in version ``3.10`` with additional Airflow extras (``mssql,hdfs``) from ``2.3.0`` PyPI package, and additional dependency (``oauth2client``). .. exampleinclude:: docker-examples/customizing/pypi-extras-and-deps.sh @@ -757,7 +757,7 @@ have more complex dependencies to build. Building optimized images ......................... -The following example builds the production image in version ``3.9`` with additional Airflow extras from +The following example builds the production image in version ``3.10`` with additional Airflow extras from PyPI package but it includes additional apt dev and runtime dependencies. The dev dependencies are those that require ``build-essential`` and usually need to involve recompiling @@ -815,7 +815,7 @@ a branch or tag in your repository and use the tag or branch in the URL that you In case of GitHub builds you need to pass the constraints reference manually in case you want to use specific constraints, otherwise the default ``constraints-main`` is used. 
-The following example builds the production image in version ``3.9`` with default extras from the latest main version and +The following example builds the production image in version ``3.10`` with default extras from the latest main version and constraints are taken from latest version of the constraints-main branch in GitHub. .. exampleinclude:: docker-examples/customizing/github-main.sh diff --git a/docker-stack-docs/docker-examples/customizing/add-build-essential-custom.sh b/docker-stack-docs/docker-examples/customizing/add-build-essential-custom.sh index 9426d8925045b..c19be12d4e5a3 100755 --- a/docker-stack-docs/docker-examples/customizing/add-build-essential-custom.sh +++ b/docker-stack-docs/docker-examples/customizing/add-build-essential-custom.sh @@ -31,7 +31,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_PYTHON_DEPS="mpi4py==3.1.6" \ --build-arg ADDITIONAL_DEV_APT_DEPS="libopenmpi-dev" \ diff --git a/docker-stack-docs/docker-examples/customizing/custom-sources.sh b/docker-stack-docs/docker-examples/customizing/custom-sources.sh index 311ba5c4d33ef..33d7bb7cff313 100755 --- a/docker-stack-docs/docker-examples/customizing/custom-sources.sh +++ b/docker-stack-docs/docker-examples/customizing/custom-sources.sh @@ -32,7 +32,7 @@ export DOCKER_BUILDKIT=1 docker build . 
-f Dockerfile \ --pull \ --platform 'linux/amd64' \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack,odbc" \ --build-arg ADDITIONAL_PYTHON_DEPS=" \ diff --git a/docker-stack-docs/docker-examples/customizing/github-different-repository.sh b/docker-stack-docs/docker-examples/customizing/github-different-repository.sh index 30d4c40ef808a..82a7e9870fd38 100755 --- a/docker-stack-docs/docker-examples/customizing/github-different-repository.sh +++ b/docker-stack-docs/docker-examples/customizing/github-different-repository.sh @@ -29,7 +29,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow @ https://github.com/potiuk/airflow/archive/main.tar.gz" \ --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \ --build-arg CONSTRAINTS_GITHUB_REPOSITORY="potiuk/airflow" \ diff --git a/docker-stack-docs/docker-examples/customizing/github-main.sh b/docker-stack-docs/docker-examples/customizing/github-main.sh index 19ed9e7955da8..8b4e55523fd81 100755 --- a/docker-stack-docs/docker-examples/customizing/github-main.sh +++ b/docker-stack-docs/docker-examples/customizing/github-main.sh @@ -30,7 +30,7 @@ export DOCKER_BUILDKIT=1 docker build . 
\ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow @ https://github.com/apache/airflow/archive/main.tar.gz" \ --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \ --tag "my-github-main:0.0.1" diff --git a/docker-stack-docs/docker-examples/customizing/github-v2-2-test.sh b/docker-stack-docs/docker-examples/customizing/github-v2-2-test.sh index 14a0cb01c75ee..88e5bb7f55ec6 100755 --- a/docker-stack-docs/docker-examples/customizing/github-v2-2-test.sh +++ b/docker-stack-docs/docker-examples/customizing/github-v2-2-test.sh @@ -31,7 +31,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow @ https://github.com/apache/airflow/archive/v2-2-test.tar.gz" \ --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-2" \ --tag "my-github-v2-2:0.0.1" diff --git a/docker-stack-docs/docker-examples/customizing/pypi-dev-runtime-deps.sh b/docker-stack-docs/docker-examples/customizing/pypi-dev-runtime-deps.sh index 64be44ba6a217..aa9929d97216a 100755 --- a/docker-stack-docs/docker-examples/customizing/pypi-dev-runtime-deps.sh +++ b/docker-stack-docs/docker-examples/customizing/pypi-dev-runtime-deps.sh @@ -32,7 +32,7 @@ export DOCKER_BUILDKIT=1 docker build . 
\ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ diff --git a/docker-stack-docs/docker-examples/customizing/pypi-extras-and-deps.sh b/docker-stack-docs/docker-examples/customizing/pypi-extras-and-deps.sh index 4d3baa7735cc1..c86b2385289d5 100755 --- a/docker-stack-docs/docker-examples/customizing/pypi-extras-and-deps.sh +++ b/docker-stack-docs/docker-examples/customizing/pypi-extras-and-deps.sh @@ -31,7 +31,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs" \ --build-arg ADDITIONAL_PYTHON_DEPS="oauth2client" \ diff --git a/docker-stack-docs/docker-examples/customizing/pypi-selected-version.sh b/docker-stack-docs/docker-examples/customizing/pypi-selected-version.sh index 8b4b69a4db1f2..182b61f255e90 100755 --- a/docker-stack-docs/docker-examples/customizing/pypi-selected-version.sh +++ b/docker-stack-docs/docker-examples/customizing/pypi-selected-version.sh @@ -30,7 +30,7 @@ export AIRFLOW_VERSION=2.3.4 export DOCKER_BUILDKIT=1 docker build . 
\ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --tag "my-pypi-selected-version:0.0.1" # [END build] diff --git a/docker-stack-docs/docker-examples/restricted/restricted_environments.sh b/docker-stack-docs/docker-examples/restricted/restricted_environments.sh index a41ebacf77156..d4e76e4ea4168 100755 --- a/docker-stack-docs/docker-examples/restricted/restricted_environments.sh +++ b/docker-stack-docs/docker-examples/restricted/restricted_environments.sh @@ -29,8 +29,8 @@ mkdir -p docker-context-files export AIRFLOW_VERSION="2.5.3" rm docker-context-files/*.whl docker-context-files/*.tar.gz docker-context-files/*.txt || true -curl -Lo "docker-context-files/constraints-3.9.txt" \ - "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.9.txt" +curl -Lo "docker-context-files/constraints-3.10.txt" \ + "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.10.txt" echo echo "Make sure you use the right python version here (should be same as in constraints)!" @@ -38,7 +38,7 @@ echo python --version pip download --dest docker-context-files \ - --constraint docker-context-files/constraints-3.9.txt \ + --constraint docker-context-files/constraints-3.10.txt \ "apache-airflow[async,celery,elasticsearch,kubernetes,postgres,redis,ssh,statsd,virtualenv]==${AIRFLOW_VERSION}" # [END download] @@ -47,7 +47,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" \ + --build-arg PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg INSTALL_MYSQL_CLIENT="false" \ @@ -55,7 +55,7 @@ docker build . 
\ --build-arg INSTALL_POSTGRES_CLIENT="true" \ --build-arg DOCKER_CONTEXT_FILES="docker-context-files" \ --build-arg INSTALL_DISTRIBUTIONS_FROM_CONTEXT="true" \ - --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-3.9.txt" \ + --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-3.10.txt" \ --tag airflow-my-restricted-environment:0.0.1 # [END build] diff --git a/docker-stack-docs/entrypoint.rst b/docker-stack-docs/entrypoint.rst index 311ae098bb53e..3ddc7d479e6ba 100644 --- a/docker-stack-docs/entrypoint.rst +++ b/docker-stack-docs/entrypoint.rst @@ -132,7 +132,7 @@ if you specify extra arguments. For example: .. code-block:: bash - docker run -it apache/airflow:3.1.0-python3.9 bash -c "ls -la" + docker run -it apache/airflow:3.1.0-python3.10 bash -c "ls -la" total 16 drwxr-xr-x 4 airflow root 4096 Jun 5 18:12 . drwxr-xr-x 1 root root 4096 Jun 5 18:12 .. @@ -144,7 +144,7 @@ you pass extra parameters. For example: .. code-block:: bash - > docker run -it apache/airflow:3.1.0-python3.9 python -c "print('test')" + > docker run -it apache/airflow:3.1.0-python3.10 python -c "print('test')" test If first argument equals to ``airflow`` - the rest of the arguments is treated as an Airflow command @@ -152,13 +152,13 @@ to execute. Example: .. code-block:: bash - docker run -it apache/airflow:3.1.0-python3.9 airflow webserver + docker run -it apache/airflow:3.1.0-python3.10 airflow webserver If there are any other arguments - they are simply passed to the "airflow" command .. code-block:: bash - > docker run -it apache/airflow:3.1.0-python3.9 help + > docker run -it apache/airflow:3.1.0-python3.10 help usage: airflow [-h] GROUP_OR_COMMAND ... 
positional arguments: @@ -363,7 +363,7 @@ database and creating an ``admin/admin`` Admin user with the following command: --env "_AIRFLOW_DB_MIGRATE=true" \ --env "_AIRFLOW_WWW_USER_CREATE=true" \ --env "_AIRFLOW_WWW_USER_PASSWORD=admin" \ - apache/airflow:3.1.0-python3.9 webserver + apache/airflow:3.1.0-python3.10 webserver .. code-block:: bash @@ -372,7 +372,7 @@ database and creating an ``admin/admin`` Admin user with the following command: --env "_AIRFLOW_DB_MIGRATE=true" \ --env "_AIRFLOW_WWW_USER_CREATE=true" \ --env "_AIRFLOW_WWW_USER_PASSWORD_CMD=echo admin" \ - apache/airflow:3.1.0-python3.9 webserver + apache/airflow:3.1.0-python3.10 webserver The commands above perform initialization of the SQLite database, create admin user with admin password and Admin role. They also forward local port ``8080`` to the webserver port and finally start the webserver. @@ -412,6 +412,6 @@ Example: --env "_AIRFLOW_DB_MIGRATE=true" \ --env "_AIRFLOW_WWW_USER_CREATE=true" \ --env "_AIRFLOW_WWW_USER_PASSWORD_CMD=echo admin" \ - apache/airflow:3.1.0-python3.9 webserver + apache/airflow:3.1.0-python3.10 webserver This method is only available starting from Docker image of Airflow 2.1.1 and above. 
diff --git a/docker-stack-docs/pyproject.toml b/docker-stack-docs/pyproject.toml index 2258fa887542c..c59a56f8a08e2 100644 --- a/docker-stack-docs/pyproject.toml +++ b/docker-stack-docs/pyproject.toml @@ -24,7 +24,7 @@ build-backend = "hatchling.build" [project] name = "apache-airflow-docker-stack" description = "Programmatically author, schedule and monitor data pipelines" -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] @@ -40,7 +40,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", diff --git a/docker-tests/pyproject.toml b/docker-tests/pyproject.toml index c242dc53e2789..b0ab22e2123de 100644 --- a/docker-tests/pyproject.toml +++ b/docker-tests/pyproject.toml @@ -26,7 +26,7 @@ description = "Docker tests for Apache Airflow" classifiers = [ "Private :: Do Not Upload", ] -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] diff --git a/docker-tests/tests/docker_tests/constants.py b/docker-tests/tests/docker_tests/constants.py index 99ffb351c2dab..1ec467343d86d 100644 --- a/docker-tests/tests/docker_tests/constants.py +++ b/docker-tests/tests/docker_tests/constants.py @@ -21,6 +21,6 @@ AIRFLOW_ROOT_PATH = Path(__file__).resolve().parents[3] -DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.9" +DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.10" DEFAULT_DOCKER_IMAGE = f"ghcr.io/apache/airflow/main/prod/python{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}:latest" DOCKER_IMAGE = os.environ.get("DOCKER_IMAGE") or DEFAULT_DOCKER_IMAGE diff --git a/docker-tests/tests/docker_tests/docker_utils.py 
b/docker-tests/tests/docker_tests/docker_utils.py index e8a67c425b163..e2c2a42165626 100644 --- a/docker-tests/tests/docker_tests/docker_utils.py +++ b/docker-tests/tests/docker_tests/docker_utils.py @@ -87,11 +87,11 @@ def display_dependency_conflict_message(): CI image: - breeze ci-image build --upgrade-to-newer-dependencies --python 3.9 + breeze ci-image build --upgrade-to-newer-dependencies --python 3.10 Production image: - breeze ci-image build --production-image --upgrade-to-newer-dependencies --python 3.9 + breeze ci-image build --production-image --upgrade-to-newer-dependencies --python 3.10 ***** End of the instructions **** """ diff --git a/helm-tests/pyproject.toml b/helm-tests/pyproject.toml index b370c400b61f2..1a718a5b95c87 100644 --- a/helm-tests/pyproject.toml +++ b/helm-tests/pyproject.toml @@ -26,7 +26,7 @@ description = "Helm tests for Apache Airflow" classifiers = [ "Private :: Do Not Upload", ] -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] diff --git a/kubernetes-tests/pyproject.toml b/kubernetes-tests/pyproject.toml index 0a161cceae980..7d912622d9346 100644 --- a/kubernetes-tests/pyproject.toml +++ b/kubernetes-tests/pyproject.toml @@ -24,7 +24,7 @@ description = "Kubernetes tests for Apache Airflow" classifiers = [ "Private :: Do Not Upload", ] -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] diff --git a/kubernetes-tests/tests/kubernetes_tests/test_kubernetes_pod_operator.py b/kubernetes-tests/tests/kubernetes_tests/test_kubernetes_pod_operator.py index 12a4ee5bb24fe..afbcf113932a6 100644 --- a/kubernetes-tests/tests/kubernetes_tests/test_kubernetes_pod_operator.py +++ b/kubernetes-tests/tests/kubernetes_tests/test_kubernetes_pod_operator.py @@ -1485,7 +1485,7 @@ def __getattr__(self, name): task = KubernetesPodOperator( 
task_id="dry_run_demo", name="hello-dry-run", - image="python:3.9-slim-buster", + image="python:3.10-slim-buster", cmds=["printenv"], env_vars=[ V1EnvVar(name="password", value="{{ password }}"), diff --git a/providers-summary-docs/pyproject.toml b/providers-summary-docs/pyproject.toml index 4ec9fa33e71d2..b6ce35f9b4f1e 100644 --- a/providers-summary-docs/pyproject.toml +++ b/providers-summary-docs/pyproject.toml @@ -24,7 +24,7 @@ build-backend = "hatchling.build" [project] name = "apache-airflow-providers" description = "Programmatically author, schedule and monitor data pipelines" -requires-python = "~=3.9,<3.13" +requires-python = "~=3.10,<3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] @@ -40,7 +40,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", diff --git a/providers/airbyte/README.rst b/providers/airbyte/README.rst index eb26b838ae4c1..3dcd839447130 100644 --- a/providers/airbyte/README.rst +++ b/providers/airbyte/README.rst @@ -45,7 +45,7 @@ You can install this package on top of an existing Airflow 2 installation (see ` for the minimum Airflow version supported) via ``pip install apache-airflow-providers-airbyte`` -The package supports the following python versions: 3.9,3.10,3.11,3.12 +The package supports the following python versions: 3.10,3.11,3.12 Requirements ------------ diff --git a/providers/alibaba/README.rst b/providers/alibaba/README.rst index d8ca8bd2672f9..c1371f27cbe28 100644 --- a/providers/alibaba/README.rst +++ b/providers/alibaba/README.rst @@ -45,7 +45,7 @@ You can install this package on top of an existing Airflow 2 installation (see ` for the minimum Airflow version supported) via ``pip install 
apache-airflow-providers-alibaba`` -The package supports the following python versions: 3.9,3.10,3.11,3.12 +The package supports the following python versions: 3.10,3.11,3.12 Requirements ------------ diff --git a/providers/amazon/docs/executors/general.rst b/providers/amazon/docs/executors/general.rst index 4928e2d8f7a26..9edcc7bd1aa60 100644 --- a/providers/amazon/docs/executors/general.rst +++ b/providers/amazon/docs/executors/general.rst @@ -142,9 +142,9 @@ which is running the Airflow scheduler process (and thus, the |executorName| executor.) Apache Airflow images with specific python versions can be downloaded from the Dockerhub registry, and filtering tags by the `python -version `__. -For example, the tag ``latest-python3.9`` specifies that the image will -have python 3.9 installed. +version `__. +For example, the tag ``latest-python3.10`` specifies that the image will +have python 3.10 installed. .. END BASE_IMAGE diff --git a/providers/edge3/docs/install_on_windows.rst b/providers/edge3/docs/install_on_windows.rst index 850de56ee7cdb..7144800e9b200 100644 --- a/providers/edge3/docs/install_on_windows.rst +++ b/providers/edge3/docs/install_on_windows.rst @@ -29,7 +29,7 @@ Install Edge Worker on Windows The setup was tested on Windows 10 with Python 3.12.8, 64-bit. Backend for tests was Airflow 2.10.5. To setup a instance of Edge Worker on Windows, you need to follow the steps below: -1. Install Python 3.9 or higher. +1. Install Python 3.10 or higher. 2. Create an empty folder as base to start with. In our example it is ``C:\Airflow``. 3. Start Shell/Command Line in ``C:\Airflow`` and create a new virtual environment via: ``python -m venv venv`` 4. 
Activate the virtual environment via: ``venv\Scripts\activate.bat`` diff --git a/scripts/ci/airflow_version_check.py b/scripts/ci/airflow_version_check.py index 45354d298847f..467f71442ce09 100755 --- a/scripts/ci/airflow_version_check.py +++ b/scripts/ci/airflow_version_check.py @@ -17,7 +17,7 @@ # under the License. # /// script -# requires-python = ">=3.9" +# requires-python = ">=3.10" # dependencies = [ # "packaging>=23.2", # "requests>=2.28.1", diff --git a/scripts/ci/docker-compose/devcontainer.env b/scripts/ci/docker-compose/devcontainer.env index 8f7ae95aba554..f639e6882d814 100644 --- a/scripts/ci/docker-compose/devcontainer.env +++ b/scripts/ci/docker-compose/devcontainer.env @@ -15,10 +15,10 @@ # specific language governing permissions and limitations # under the License. HOME= -AIRFLOW_CI_IMAGE="ghcr.io/apache/airflow/main/ci/python3.9:latest" +AIRFLOW_CI_IMAGE="ghcr.io/apache/airflow/main/ci/python3.10:latest" ANSWER= AIRFLOW_ENV="development" -PYTHON_MAJOR_MINOR_VERSION="3.9" +PYTHON_MAJOR_MINOR_VERSION="3.10" AIRFLOW_EXTRAS= BASE_BRANCH="main" BREEZE="true" diff --git a/scripts/ci/docker-compose/devcontainer.yml b/scripts/ci/docker-compose/devcontainer.yml index f3f77f253f57c..2d0d918ea8210 100644 --- a/scripts/ci/docker-compose/devcontainer.yml +++ b/scripts/ci/docker-compose/devcontainer.yml @@ -19,7 +19,7 @@ services: airflow: stdin_open: true # docker run -i tty: true # docker run -t - image: ghcr.io/apache/airflow/main/ci/python3.9 + image: ghcr.io/apache/airflow/main/ci/python3.10 env_file: devcontainer.env ports: - "22:22" diff --git a/scripts/ci/pre_commit/check_min_python_version.py b/scripts/ci/pre_commit/check_min_python_version.py index 825b899241816..a7b4fbbd6f19e 100755 --- a/scripts/ci/pre_commit/check_min_python_version.py +++ b/scripts/ci/pre_commit/check_min_python_version.py @@ -26,7 +26,7 @@ from common_precommit_utils import console # update this version when we switch to a newer version of Python -required_version = 
tuple(map(int, "3.9".split("."))) +required_version = tuple(map(int, "3.10".split("."))) required_version_str = f"{required_version[0]}.{required_version[1]}" global_version = tuple( map( diff --git a/scripts/ci/pre_commit/update_providers_dependencies.py b/scripts/ci/pre_commit/update_providers_dependencies.py index 5397692c03669..5266bfc1d98ea 100755 --- a/scripts/ci/pre_commit/update_providers_dependencies.py +++ b/scripts/ci/pre_commit/update_providers_dependencies.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. # /// script -# requires-python = ">=3.9" +# requires-python = ">=3.10" # dependencies = [ # "rich>=12.4.4", # "pyyaml>=6.0.2", diff --git a/scripts/docker/entrypoint_ci.sh b/scripts/docker/entrypoint_ci.sh index e62e0a58c8002..2588ab43e717f 100755 --- a/scripts/docker/entrypoint_ci.sh +++ b/scripts/docker/entrypoint_ci.sh @@ -39,7 +39,7 @@ chmod 1777 /tmp AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd) -PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.9} +PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.10} export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}} diff --git a/scripts/in_container/install_development_dependencies.py b/scripts/in_container/install_development_dependencies.py index 1b887c3d4bcd7..7658472f5829b 100755 --- a/scripts/in_container/install_development_dependencies.py +++ b/scripts/in_container/install_development_dependencies.py @@ -17,7 +17,7 @@ # specific language governing permissions and limitations # under the License. 
# /// script -# requires-python = ">=3.9" +# requires-python = ">=3.10" # dependencies = [ # "packaging>=23.2", # "click>=8.1.8", diff --git a/scripts/in_container/run_generate_constraints.py b/scripts/in_container/run_generate_constraints.py index 58cf5cb6afc00..03caaabbde10d 100755 --- a/scripts/in_container/run_generate_constraints.py +++ b/scripts/in_container/run_generate_constraints.py @@ -83,7 +83,7 @@ # commands that might change the installed version of apache-airflow should include "apache-airflow==X.Y.Z" # in the list of install targets to prevent Airflow accidental upgrade or downgrade. # -# Typical installation process of airflow for Python 3.9 is (with random selection of extras and custom +# Typical installation process of airflow for Python 3.10 is (with random selection of extras and custom # dependencies added), usually consists of two steps: # # 1. Reproducible installation of airflow with selected providers (note constraints are used):