diff --git a/.dockerignore b/.dockerignore index c50ed5ae24ee6..75e6291445ab6 100644 --- a/.dockerignore +++ b/.dockerignore @@ -38,6 +38,7 @@ !providers/ !task-sdk/ !airflow-ctl/ +!go-sdk/ # Add all "test" distributions !tests diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 69f10c58301a7..78f7f7b8e7c28 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -40,7 +40,9 @@ # See: https://github.com/apache/airflow/blob/main/airflow-core/src/airflow/ui/public/i18n/README.md#43-engaged-translator airflow-core/src/airflow/ui/public/i18n/locales/ar/ @shahar1 @hussein-awala # + @ahmadtfarhan airflow-core/src/airflow/ui/public/i18n/locales/de/ @jscheffl # + @TJaniF @m1racoli +airflow-core/src/airflow/ui/public/i18n/locales/es/ @bbovenzi # + @aoelvp94 airflow-core/src/airflow/ui/public/i18n/locales/he/ @eladkal @shahar1 @romsharon98 # +@Dev-iL +airflow-core/src/airflow/ui/public/i18n/locales/ko/ @jscheffl @potiuk # + @choo121600 @kgw7401 @0ne-stone airflow-core/src/airflow/ui/public/i18n/locales/nl/ @BasPH # + @DjVinnii airflow-core/src/airflow/ui/public/i18n/locales/pl/ @potiuk @mobuchowski # + @kacpermuda airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/ @Lee-W @jason810496 # + @RoyLee1224 @guan404ming diff --git a/.github/actions/install-pre-commit/action.yml b/.github/actions/install-pre-commit/action.yml index ca6dd5da78194..e200d6639a6a5 100644 --- a/.github/actions/install-pre-commit/action.yml +++ b/.github/actions/install-pre-commit/action.yml @@ -24,7 +24,7 @@ inputs: default: "3.10" uv-version: description: 'uv version to use' - default: "0.7.17" # Keep this comment to allow automatic replacement of uv version + default: "0.7.20" # Keep this comment to allow automatic replacement of uv version pre-commit-version: description: 'pre-commit version to use' default: "4.2.0" # Keep this comment to allow automatic replacement of pre-commit version diff --git a/.github/actions/migration_tests/action.yml b/.github/actions/migration_tests/action.yml index 0198cbf472b0b..34951bac5ffe0 100644 --- a/.github/actions/migration_tests/action.yml +++ b/.github/actions/migration_tests/action.yml @@ -24,9 +24,9 @@ runs: - name: "Test migration file 2 to 3 migration: ${{env.BACKEND}}" shell: bash run: | - breeze shell "${{ env.AIRFLOW_2_CMD }}" --use-airflow-version 2.11.0 --answer y && - breeze shell "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${{env.DB_MANGERS}} - ${{ env.AIRFLOW_3_CMD }}" --no-db-cleanup + breeze shell "${AIRFLOW_2_CMD}" --use-airflow-version 2.11.0 --answer y && + breeze shell "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${DB_MANGERS} + ${AIRFLOW_3_CMD}" --no-db-cleanup env: COMPOSE_PROJECT_NAME: "docker-compose" DB_RESET: "false" @@ -47,9 +47,9 @@ runs: - name: "Test ORM migration 2 to 3: ${{env.BACKEND}}" shell: bash run: > - breeze shell "${{ env.AIRFLOW_2_CMD }}" --use-airflow-version 2.11.0 --answer y && - breeze shell "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${{env.DB_MANGERS}} - ${{ env.AIRFLOW_3_CMD }}" --no-db-cleanup + breeze shell "${AIRFLOW_2_CMD}" --use-airflow-version 2.11.0 --answer y && + breeze shell "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${DB_MANGERS} + ${AIRFLOW_3_CMD}" --no-db-cleanup env: COMPOSE_PROJECT_NAME: "docker-compose" DB_RESET: "false" @@ -69,7 +69,7 @@ runs: - name: "Test ORM migration ${{env.BACKEND}}" shell: bash run: > - breeze shell "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${{env.DB_MANAGERS}} && + breeze shell "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${DB_MANAGERS} && airflow db reset -y && 
airflow db migrate --to-revision heads && airflow db downgrade -n 2.7.0 -y && @@ -86,7 +86,7 @@ runs: shell: bash run: > breeze shell - "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${{env.DB_MANAGERS}} && + "export AIRFLOW__DATABASE__EXTERNAL_DB_MANAGERS=${DB_MANAGERS} && airflow db reset -y && airflow db downgrade -n 2.7.0 -y && airflow db migrate -s" diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index 9c5a437d3e07b..1bfdfafef6c41 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -377,6 +377,9 @@ labelPRBasedOnFilePath: translation:de: - airflow-core/src/airflow/ui/public/i18n/locales/de/* + translation:es: + - airflow-core/src/airflow/ui/public/i18n/locales/es/* + translation:fr: - airflow-core/src/airflow/ui/public/i18n/locales/fr/* diff --git a/.github/workflows/additional-ci-image-checks.yml b/.github/workflows/additional-ci-image-checks.yml index 024f4a4ea0a93..86a75b1d31fb4 100644 --- a/.github/workflows/additional-ci-image-checks.yml +++ b/.github/workflows/additional-ci-image-checks.yml @@ -117,7 +117,7 @@ jobs: # Check that after earlier cache push, breeze command will build quickly check-that-image-builds-quickly: - timeout-minutes: 11 + timeout-minutes: 17 name: Check that image builds quickly runs-on: ${{ fromJSON(inputs.runners) }} env: @@ -144,10 +144,5 @@ jobs: uses: ./.github/actions/breeze with: use-uv: ${{ inputs.use-uv }} - - name: "Login to ghcr.io" - env: - actor: ${{ github.actor }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: echo "$GITHUB_TOKEN" | docker login ghcr.io -u "$actor" --password-stdin - name: "Check that image builds quickly" - run: breeze shell --max-time 600 --platform "${PLATFORM}" + run: breeze shell --max-time 900 --platform "${PLATFORM}" diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 161ba36d58043..45f92c8db147b 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -267,6 +267,8 @@ jobs: skip-pre-commits: ${{ inputs.skip-pre-commits }} - name: "Autoupdate all pre-commits" run: pre-commit autoupdate + - name: "Autoupdate Lucas-C/pre-commit-hooks to bleeding edge" + run: pre-commit autoupdate --bleeding-edge --freeze --repo https://github.com/Lucas-C/pre-commit-hooks - name: "Run automated upgrade for black" run: > pre-commit run @@ -284,7 +286,7 @@ jobs: # For UV we are not failing the upgrade installers check if it is updated because # it is upgraded very frequently, so we want to manually upgrade it rather than # get notified about it - until it stabilizes in 1.* version - - name: "Run automated upgrade for uv (open to see if new version is updated)" + - name: "Run automated upgrade for uv (not failing - just informational)" run: > pre-commit run --all-files --show-diff-on-failure --color always --verbose @@ -292,10 +294,13 @@ jobs: if: always() env: UPGRADE_UV: "true" + UPGRADE_PYTHON: "false" + UPGRADE_GOLANG: "true" UPGRADE_PIP: "false" UPGRADE_PRE_COMMIT: "false" UPGRADE_NODE_LTS: "false" - - name: "Run automated upgrade for pip, pre-commit and node" + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: "Run automated upgrade for pip, pre-commit and node (failing if needed)" run: > pre-commit run --all-files --show-diff-on-failure --color always --verbose @@ -303,9 +308,12 @@ jobs: if: always() env: UPGRADE_UV: "false" + UPGRADE_PYTHON: "true" + UPGRADE_GOLANG: "false" UPGRADE_PIP: "true" UPGRADE_PRE_COMMIT: "true" UPGRADE_NODE_LTS: "true" + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} test-airflow-release-commands: 
timeout-minutes: 80 diff --git a/.github/workflows/ci-amd.yml b/.github/workflows/ci-amd.yml index bb88a352f8c2e..93f8c536556d3 100644 --- a/.github/workflows/ci-amd.yml +++ b/.github/workflows/ci-amd.yml @@ -186,7 +186,7 @@ jobs: - name: Install pre-commit, uv, and pre-commit-uv shell: bash env: - UV_VERSION: "0.7.17" # Keep this comment to allow automatic replacement of uv version + UV_VERSION: "0.7.20" # Keep this comment to allow automatic replacement of uv version PRE_COMMIT_VERSION: "4.2.0" # Keep this comment to allow automatic replacement of pre-commit version PRE_COMMIT_UV_VERSION: "4.1.4" # Keep this comment to allow automatic replacement of pre-commit-uv version run: | diff --git a/.github/workflows/publish-docs-to-s3.yml b/.github/workflows/publish-docs-to-s3.yml index 3166c8919086e..6c1eb1dd48a1d 100644 --- a/.github/workflows/publish-docs-to-s3.yml +++ b/.github/workflows/publish-docs-to-s3.yml @@ -189,6 +189,7 @@ jobs: - name: "Reinstall breeze from the current version" run: | breeze setup self-upgrade --use-current-airflow-sources + if: inputs.build-sboms working-directory: current-version - name: "Make sure SBOM dir exists and has the right permissions" run: | @@ -212,6 +213,7 @@ jobs: run: | echo "Generated SBOM files:" find ./generated/_build/docs/apache-airflow/stable/sbom/ -type f | sort + if: inputs.build-sboms - name: "Reinstall breeze from ${{ inputs.ref }} reference" run: breeze setup self-upgrade --use-current-airflow-sources diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 13b7121241e9d..790cc33fa213d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -50,7 +50,7 @@ repos: - "2" - repo: https://github.com/Lucas-C/pre-commit-hooks # replace hash with version once PR #103 merged comes in a release - rev: fd3fbe825390abc682953165e9aa58f5f1bf7339 + rev: abdd8b62891099da34162217ecb3872d22184a51 hooks: - id: insert-license name: Add license for all SQL files @@ -213,7 +213,7 @@ repos: ^scripts/ci/pre_commit/update_installers_and_pre_commit\.py$ pass_filenames: false require_serial: true - additional_dependencies: ['pyyaml>=6.0.2', 'rich>=12.4.4', 'requests>=2.31.0'] + additional_dependencies: ['pyyaml>=6.0.2', 'rich>=12.4.4', 'requests>=2.31.0',"packaging>=25"] - id: update-chart-dependencies name: Update chart dependencies to latest (manual) entry: ./scripts/ci/pre_commit/update_chart_dependencies.py @@ -364,7 +364,7 @@ repos: - --skip=providers/.*/src/airflow/providers/*/*.rst,providers/*/docs/changelog.rst,docs/*/commits.rst,providers/*/docs/commits.rst,providers/*/*/docs/commits.rst,docs/apache-airflow/tutorial/pipeline_example.csv,*.min.js,*.lock,INTHEWILD.md,*.svg - --exclude-file=.codespellignorelines - repo: https://github.com/woodruffw/zizmor-pre-commit - rev: v1.7.0 + rev: v1.11.0 hooks: - id: zizmor name: Run zizmor to check for github workflow syntax errors @@ -427,7 +427,7 @@ repos: types_or: [python, pyi] args: [--fix] require_serial: true - additional_dependencies: ['ruff==0.12.1'] + additional_dependencies: ['ruff==0.12.3'] exclude: ^airflow-core/tests/unit/dags/test_imports\.py$|^performance/tests/test_.*\.py$ - id: ruff-format name: Run 'ruff format' @@ -437,7 +437,7 @@ repos: types_or: [python, pyi] args: [] require_serial: true - additional_dependencies: ['ruff==0.12.1'] + additional_dependencies: ['ruff==0.12.3'] exclude: ^airflow-core/tests/unit/dags/test_imports\.py$ - id: replace-bad-characters name: Replace bad characters @@ -1590,13 +1590,13 @@ repos: name: Check imports in providers entry: 
./scripts/ci/pre_commit/check_imports_in_providers.py language: python - additional_dependencies: ['rich>=12.4.4', 'ruff==0.12.1'] + additional_dependencies: ['rich>=12.4.4', 'ruff==0.12.3'] files: ^providers/.*/src/airflow/providers/.*version_compat.*\.py$ require_serial: true - id: provider-version-compat name: Check for correct version_compat imports in providers entry: ./scripts/ci/pre_commit/check_provider_version_compat.py - language: system + language: python types: [python] files: ^providers/.*/src/airflow/providers/.*\.py$ require_serial: true diff --git a/Dockerfile b/Dockerfile index 194c27645d550..e8aea12f0d009 100644 --- a/Dockerfile +++ b/Dockerfile @@ -57,7 +57,7 @@ ARG PYTHON_BASE_IMAGE="python:3.10-slim-bookworm" ARG AIRFLOW_PIP_VERSION=25.1.1 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" ARG AIRFLOW_SETUPTOOLS_VERSION=80.9.0 -ARG AIRFLOW_UV_VERSION=0.7.17 +ARG AIRFLOW_UV_VERSION=0.7.20 ARG AIRFLOW_USE_UV="false" ARG UV_HTTP_TIMEOUT="300" ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" @@ -473,11 +473,14 @@ function common::get_packaging_tool() { echo export PACKAGING_TOOL="uv" export PACKAGING_TOOL_CMD="uv pip" - if [[ ${AIRFLOW_INSTALLATION_METHOD=} == "." && -f "./pyproject.toml" ]]; then + # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 + if [[ ${AIRFLOW_INSTALLATION_METHOD=} == "." && -f "./pyproject.toml" ]]; then # for uv only install dev group when we install from sources - export EXTRA_INSTALL_FLAGS="--group=dev" + export EXTRA_INSTALL_FLAGS="--group=dev --no-binary lxml --no-binary xmlsec" else - export EXTRA_INSTALL_FLAGS="" + export EXTRA_INSTALL_FLAGS="--no-binary lxml --no-binary xmlsec" fi export EXTRA_UNINSTALL_FLAGS="" export UPGRADE_TO_HIGHEST_RESOLUTION="--upgrade --resolution highest" @@ -493,7 +496,10 @@ function common::get_packaging_tool() { echo export PACKAGING_TOOL="pip" export PACKAGING_TOOL_CMD="pip" - export EXTRA_INSTALL_FLAGS="--root-user-action ignore" + # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 + export EXTRA_INSTALL_FLAGS="--root-user-action ignore --no-binary lxml,xmlsec" export EXTRA_UNINSTALL_FLAGS="--yes" export UPGRADE_TO_HIGHEST_RESOLUTION="--upgrade --upgrade-strategy eager" export UPGRADE_IF_NEEDED="--upgrade --upgrade-strategy only-if-needed" @@ -875,8 +881,12 @@ function install_from_sources() { echo echo "${COLOR_BLUE}Attempting to upgrade all packages to highest versions.${COLOR_RESET}" echo + # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 set -x - uv sync --all-packages --resolution highest --group dev --group docs --group docs-gen --group leveldb ${extra_sync_flags} + uv sync --all-packages --resolution highest --group dev --group docs --group docs-gen \ + --group leveldb ${extra_sync_flags} --no-binary-package lxml --no-binary-package xmlsec else # We only use uv here but Installing using constraints is not supported with `uv sync`, so we # do not use ``uv sync`` because we are not committing and using uv.lock yet. 
@@ -933,8 +943,12 @@ function install_from_sources() {
         echo
         echo "${COLOR_BLUE}Falling back to no-constraints installation.${COLOR_RESET}"
         echo
+        # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2
+        # (binary lxml embeds its own libxml2, while xmlsec uses system one).
+        # See https://bugs.launchpad.net/lxml/+bug/2110068
         set -x
-        uv sync --all-packages --group dev --group docs --group docs-gen --group leveldb ${extra_sync_flags}
+        uv sync --all-packages --group dev --group docs --group docs-gen \
+            --group leveldb ${extra_sync_flags} --no-binary-package lxml --no-binary-package xmlsec
         set +x
     fi
 fi
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 6ba138121af1a..6ad40a1e62cd6 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -16,13 +16,13 @@
 #
 # WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT.
 #
-ARG PYTHON_BASE_IMAGE="python:3.10-slim-bookworm"
+ARG BASE_IMAGE="debian:bookworm-slim"
 
 ##############################################################################################
 # This is the script image where we keep all inlined bash scripts needed in other segments
-# We use PYTHON_BASE_IMAGE to make sure that the scripts are different for different platforms.
+# We use BASE_IMAGE to make sure that the scripts are different for different platforms.
 ##############################################################################################
-FROM ${PYTHON_BASE_IMAGE} as scripts
+FROM ${BASE_IMAGE} as scripts
 
 ##############################################################################################
 # Please DO NOT modify the inlined scripts manually. The content of those files will be
@@ -31,22 +31,27 @@ FROM ${PYTHON_BASE_IMAGE} as scripts
 # make the PROD Dockerfile standalone
 ##############################################################################################
 
-# The content below is automatically copied from scripts/docker/install_os_dependencies.sh
-COPY <<"EOF" /install_os_dependencies.sh
+# The content below is automatically copied from scripts/docker/install_os_dependencies_ci.sh
+COPY <<"EOF" /install_os_dependencies_ci.sh
 #!/usr/bin/env bash
 set -euo pipefail
 
 if [[ "$#" != 1 ]]; then
-    echo "ERROR! There should be 'runtime' or 'dev' parameter passed as argument.".
+    echo "ERROR! There should be 'runtime', 'ci' or 'dev' parameter passed as argument."
    exit 1
 fi
 
+AIRFLOW_PYTHON_VERSION=${AIRFLOW_PYTHON_VERSION:-v3.10.10}
+GOLANG_MAJOR_MINOR_VERSION=${GOLANG_MAJOR_MINOR_VERSION:-1.24.4}
+
 if [[ "${1}" == "runtime" ]]; then
     INSTALLATION_TYPE="RUNTIME"
 elif [[ "${1}" == "dev" ]]; then
-    INSTALLATION_TYPE="dev"
+    INSTALLATION_TYPE="DEV"
+elif [[ "${1}" == "ci" ]]; then
+    INSTALLATION_TYPE="CI"
 else
-    echo "ERROR! Wrong argument. Passed ${1} and it should be one of 'runtime' or 'dev'.".
+    echo "ERROR! Wrong argument. Passed ${1} and it should be one of 'runtime', 'ci' or 'dev'."
exit 1 fi @@ -56,7 +61,10 @@ function get_dev_apt_deps() { freetds-bin freetds-dev git graphviz graphviz-dev krb5-user ldap-utils libev4 libev-dev libffi-dev libgeos-dev \ libkrb5-dev libldap2-dev libleveldb1d libleveldb-dev libsasl2-2 libsasl2-dev libsasl2-modules \ libssl-dev libxmlsec1 libxmlsec1-dev locales lsb-release openssh-client pkgconf sasl2-bin \ -software-properties-common sqlite3 sudo unixodbc unixodbc-dev zlib1g-dev" +software-properties-common sqlite3 sudo unixodbc unixodbc-dev zlib1g-dev \ +gdb lcov pkg-config libbz2-dev libgdbm-dev libgdbm-compat-dev liblzma-dev \ +libncurses5-dev libreadline6-dev libsqlite3-dev lzma lzma-dev tk-dev uuid-dev \ +libzstd-dev" export DEV_APT_DEPS fi } @@ -143,14 +151,36 @@ function install_debian_runtime_dependencies() { rm -rf /var/lib/apt/lists/* /var/log/* } +function install_python() { + git clone --branch "${AIRFLOW_PYTHON_VERSION}" --depth 1 https://github.com/python/cpython.git + cd cpython + ./configure --enable-optimizations + make -s -j "$(nproc)" all + make -s -j "$(nproc)" install + ln -s /usr/local/bin/python3 /usr/local/bin/python + ln -s /usr/local/bin/pip3 /usr/local/bin/pip + cd .. + rm -rf cpython +} + +function install_golang() { + curl "https://dl.google.com/go/go${GOLANG_MAJOR_MINOR_VERSION}.linux-$(dpkg --print-architecture).tar.gz" -o "go${GOLANG_MAJOR_MINOR_VERSION}.linux.tar.gz" + rm -rf /usr/local/go && tar -C /usr/local -xzf go"${GOLANG_MAJOR_MINOR_VERSION}".linux.tar.gz +} + if [[ "${INSTALLATION_TYPE}" == "RUNTIME" ]]; then get_runtime_apt_deps install_debian_runtime_dependencies install_docker_cli else + get_dev_apt_deps install_debian_dev_dependencies + install_python + if [[ "${INSTALLATION_TYPE}" == "CI" ]]; then + install_golang + fi install_docker_cli fi EOF @@ -412,11 +442,14 @@ function common::get_packaging_tool() { echo export PACKAGING_TOOL="uv" export PACKAGING_TOOL_CMD="uv pip" - if [[ ${AIRFLOW_INSTALLATION_METHOD=} == "." && -f "./pyproject.toml" ]]; then + # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 + if [[ ${AIRFLOW_INSTALLATION_METHOD=} == "." && -f "./pyproject.toml" ]]; then # for uv only install dev group when we install from sources - export EXTRA_INSTALL_FLAGS="--group=dev" + export EXTRA_INSTALL_FLAGS="--group=dev --no-binary lxml --no-binary xmlsec" else - export EXTRA_INSTALL_FLAGS="" + export EXTRA_INSTALL_FLAGS="--no-binary lxml --no-binary xmlsec" fi export EXTRA_UNINSTALL_FLAGS="" export UPGRADE_TO_HIGHEST_RESOLUTION="--upgrade --resolution highest" @@ -432,7 +465,10 @@ function common::get_packaging_tool() { echo export PACKAGING_TOOL="pip" export PACKAGING_TOOL_CMD="pip" - export EXTRA_INSTALL_FLAGS="--root-user-action ignore" + # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses system one). 
+ # See https://bugs.launchpad.net/lxml/+bug/2110068 + export EXTRA_INSTALL_FLAGS="--root-user-action ignore --no-binary lxml,xmlsec" export EXTRA_UNINSTALL_FLAGS="--yes" export UPGRADE_TO_HIGHEST_RESOLUTION="--upgrade --upgrade-strategy eager" export UPGRADE_IF_NEEDED="--upgrade --upgrade-strategy only-if-needed" @@ -628,8 +664,12 @@ function install_from_sources() { echo echo "${COLOR_BLUE}Attempting to upgrade all packages to highest versions.${COLOR_RESET}" echo + # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 set -x - uv sync --all-packages --resolution highest --group dev --group docs --group docs-gen --group leveldb ${extra_sync_flags} + uv sync --all-packages --resolution highest --group dev --group docs --group docs-gen \ + --group leveldb ${extra_sync_flags} --no-binary-package lxml --no-binary-package xmlsec else # We only use uv here but Installing using constraints is not supported with `uv sync`, so we # do not use ``uv sync`` because we are not committing and using uv.lock yet. @@ -686,8 +726,12 @@ function install_from_sources() { echo echo "${COLOR_BLUE}Falling back to no-constraints installation.${COLOR_RESET}" echo + # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 set -x - uv sync --all-packages --group dev --group docs --group docs-gen --group leveldb ${extra_sync_flags} + uv sync --all-packages --group dev --group docs --group docs-gen \ + --group leveldb ${extra_sync_flags} --no-binary-package lxml --no-binary-package xmlsec set +x fi fi @@ -925,7 +969,7 @@ function environment_initialization() { CI=${CI:="false"} # Added to have run-tests on path - export PATH=${PATH}:${AIRFLOW_SOURCES} + export PATH=${PATH}:${AIRFLOW_SOURCES}:/usr/local/go/bin/ mkdir -pv "${AIRFLOW_HOME}/logs/" @@ -1138,13 +1182,19 @@ function check_force_lowest_dependencies() { exit 0 fi cd "${AIRFLOW_SOURCES}/providers/${provider_id/.//}" || exit 1 - uv sync --resolution lowest-direct + # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses system one). + # See https://bugs.launchpad.net/lxml/+bug/2110068 + uv sync --resolution lowest-direct --no-binary-package lxml --no-binary-package xmlsec else echo echo "${COLOR_BLUE}Forcing dependencies to lowest versions for Airflow.${COLOR_RESET}" echo cd "${AIRFLOW_SOURCES}/airflow-core" - uv sync --resolution lowest-direct + # --no-binary is needed in order to avoid libxml and xmlsec using different version of libxml2 + # (binary lxml embeds its own libxml2, while xmlsec uses system one). 
+ # See https://bugs.launchpad.net/lxml/+bug/2110068 + uv sync --resolution lowest-direct --no-binary-package lxml --no-binary-package xmlsec fi } @@ -1237,13 +1287,13 @@ COPY <<"EOF" /entrypoint_exec.sh exec /bin/bash "${@}" EOF -FROM ${PYTHON_BASE_IMAGE} as main +FROM ${BASE_IMAGE} as main # Nolog bash flag is currently ignored - but you can replace it with other flags (for example # xtrace - to show commands executed) SHELL ["/bin/bash", "-o", "pipefail", "-o", "errexit", "-o", "nounset", "-o", "nolog", "-c"] -ARG PYTHON_BASE_IMAGE +ARG BASE_IMAGE ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" # By increasing this number we can do force build of all dependencies. @@ -1253,7 +1303,7 @@ ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" ARG DEPENDENCIES_EPOCH_NUMBER="15" # Make sure noninteractive debian install is used and language variables set -ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ +ENV BASE_IMAGE=${BASE_IMAGE} \ DEBIAN_FRONTEND=noninteractive LANGUAGE=C.UTF-8 LANG=C.UTF-8 LC_ALL=C.UTF-8 \ LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8 \ DEPENDENCIES_EPOCH_NUMBER=${DEPENDENCIES_EPOCH_NUMBER} \ @@ -1264,7 +1314,7 @@ ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ UV_CACHE_DIR=/root/.cache/uv -RUN echo "Base image version: ${PYTHON_BASE_IMAGE}" +RUN echo "Base image version: ${BASE_IMAGE}" ARG DEV_APT_COMMAND="" ARG ADDITIONAL_DEV_APT_COMMAND="" @@ -1279,8 +1329,13 @@ ENV DEV_APT_COMMAND=${DEV_APT_COMMAND} \ ADDITIONAL_DEV_APT_DEPS=${ADDITIONAL_DEV_APT_DEPS} \ ADDITIONAL_DEV_APT_COMMAND=${ADDITIONAL_DEV_APT_COMMAND} -COPY --from=scripts install_os_dependencies.sh /scripts/docker/ -RUN bash /scripts/docker/install_os_dependencies.sh dev +ARG AIRFLOW_PYTHON_VERSION=v3.10.18 +ENV AIRFLOW_PYTHON_VERSION=$AIRFLOW_PYTHON_VERSION +ENV GOLANG_MAJOR_MINOR_VERSION=1.24.5 + +COPY --from=scripts install_os_dependencies_ci.sh /scripts/docker/ + +RUN bash /scripts/docker/install_os_dependencies_ci.sh ci COPY --from=scripts common.sh /scripts/docker/ @@ -1407,7 +1462,7 @@ COPY --from=scripts common.sh install_packaging_tools.sh install_additional_depe ARG AIRFLOW_PIP_VERSION=25.1.1 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" ARG AIRFLOW_SETUPTOOLS_VERSION=80.9.0 -ARG AIRFLOW_UV_VERSION=0.7.17 +ARG AIRFLOW_UV_VERSION=0.7.20 # TODO(potiuk): automate with upgrade check (possibly) ARG AIRFLOW_PRE_COMMIT_VERSION="4.2.0" ARG AIRFLOW_PRE_COMMIT_UV_VERSION="4.1.4" diff --git a/INTHEWILD.md b/INTHEWILD.md index 85375095d3915..e882d504f6da8 100644 --- a/INTHEWILD.md +++ b/INTHEWILD.md @@ -31,6 +31,7 @@ Currently, **officially** using Airflow: 1. [90 Seconds](https://90seconds.tv/) [[@aaronmak](https://github.com/aaronmak)] 1. [99](https://99taxis.com) [[@fbenevides](https://github.com/fbenevides), [@gustavoamigo](https://github.com/gustavoamigo) & [@mmmaia](https://github.com/mmmaia)] 1. [Accenture](https://www.accenture.com/au-en) [[@nijanthanvijayakumar](https://github.com/nijanthanvijayakumar)] +1. [Acciona Energia France](https://solutions.acciona-energia.fr/) [[@MohamedEqinov](https://github.com/MohamedEqinov)] 1. [AdBOOST](https://www.adboost.sk) [[AdBOOST](https://github.com/AdBOOST)] 1. [Adobe](https://www.adobe.com/) [[@mishikaSingh](https://github.com/mishikaSingh), [@ramandumcs](https://github.com/ramandumcs), [@vardancse](https://github.com/vardancse)] 1. [Adyen](https://www.adyen.com/) [[@jorricks](https://github.com/jorricks), [@MaicoTimmerman](https://github.com/MaicoTimmerman)] @@ -62,7 +63,7 @@ Currently, **officially** using Airflow: 1. 
[Asana](https://asana.com/) [[@chang](https://github.com/chang), [@dima-asana](https://github.com/dima-asana), [@jdavidheiser](https://github.com/jdavidheiser), [@ricardoandresrojas](https://github.com/ricardoandresrojas)]
 1. [Astronomer](https://www.astronomer.io) [[@schnie](https://github.com/schnie), [@ashb](https://github.com/ashb), [@kaxil](https://github.com/kaxil), [@dimberman](https://github.com/dimberman), [@andriisoldatenko](https://github.com/andriisoldatenko), [@ryw](https://github.com/ryw), [@ryanahamilton](https://github.com/ryanahamilton), [@jhtimmins](https://github.com/jhtimmins), [@vikramkoka](https://github.com/vikramkoka), [@jedcunningham](https://github.com/jedcunningham), [@BasPH](https://github.com/basph), [@ephraimbuddy](https://github.com/ephraimbuddy), [@feluelle](https://github.com/feluelle)]
 1. [Audiomack](https://audiomack.com) [[@billcrook](https://github.com/billcrook)]
-1. [Auth0](https://auth0.com) [[@scottypate](https://github.com/scottypate)], [[@dm03514](https://github.com/dm03514)], [[@karangale](https://github.com/karangale)]
+1. [Auth0](https://auth0.com) [[@scottypate](https://github.com/scottypate), [@dm03514](https://github.com/dm03514), [@karangale](https://github.com/karangale)]
 1. [Autodesk](https://autodesk.com)
 1. [Automattic](https://automattic.com/) [[@anandnalya](https://github.com/anandnalya), [@bperson](https://github.com/bperson), [@khrol](https://github.com/Khrol), [@xyu](https://github.com/xyu)]
 1. [Avesta Technologies](https://avestatechnologies.com) [[@TheRum](https://github.com/TheRum)]
@@ -90,10 +91,10 @@ Currently, **officially** using Airflow:
 1. [BlaBlaCar](https://www.blablacar.com) [[@puckel](https://github.com/puckel) & [@wmorin](https://github.com/wmorin)]
 1. [Blacklane](https://www.blacklane.com) [[@serkef](https://github.com/serkef)]
 1. [Bloc](https://www.bloc.io) [[@dpaola2](https://github.com/dpaola2)]
-1. [Bloomberg](https://www.techatbloomberg.com) [[@skandala23] (https://github.com/skandala23) & [@vfeldsher](https://https://github.com/vfeldsher)]
+1. [Bloomberg](https://www.techatbloomberg.com) [[@skandala23](https://github.com/skandala23) & [@vfeldsher](https://github.com/vfeldsher)]
 1. [Bloomreach](https://www.bloomreach.com/) [[@neelborooah](https://github.com/neelborooah) & [@debodirno](https://github.com/debodirno) & [@ayushmnnit](https://github.com/ayushmnnit)]
 1. [Blue Yonder](http://www.blue-yonder.com) [[@blue-yonder](https://github.com/blue-yonder)]
-1. [Blue3 Investimentos](https://blue3investimentos.com.br) [[@ericcoleta] (https://github.com/ericcoleta) & [@plutaniano](https://github.com/plutaniano)]
+1. [Blue3 Investimentos](https://blue3investimentos.com.br) [[@ericcoleta](https://github.com/ericcoleta) & [@plutaniano](https://github.com/plutaniano)]
 1. [BlueApron](https://www.blueapron.com) [[@jasonjho](https://github.com/jasonjho) & [@matthewdavidhauser](https://github.com/matthewdavidhauser)]
 1. [Bluecore](https://www.bluecore.com) [[@JLDLaughlin](https://github.com/JLDLaughlin)]
 1. [Bluekiri](https://bluekiri.com) [[@Bluekiri](https://github.com/bluekiri)]
@@ -118,7 +119,7 @@ Currently, **officially** using Airflow:
 1. [Capital One](https://www.capitalone.com) [[@anoopengineer](https://github.com/anoopengineer)]
 1. [Carbonite](https://www.carbonite.com) [[@ajbosco](https://github.com/ajbosco)]
 1. [CarLabs](https://www.carlabs.ai/) [[@sganz](https://github.com/sganz) & [@odannyc](https://github.com/odannyc)]
 1.
[Carpe Data](https://www.carpe.io/) [[@manugarri](https://github.com/manugarri)]] +1. [Carpe Data](https://www.carpe.io/) [[@manugarri](https://github.com/manugarri)] 1. [CAVA](https://www.cava.com) [[@minh5](https://github.com/minh5) & [@patchus](https://github.com/patchus)] 1. [Celect](http://www.celect.com) [[@superdosh](https://github.com/superdosh) & [@chadcelect](https://github.com/chadcelect)] 1. [Censys](https://censys.io) [[@zakird](https://github.com/zakird), [@dadrian](https://github.com/dadrian), & [@andrewsardone](https://github.com/andrewsardone)] @@ -139,7 +140,7 @@ Currently, **officially** using Airflow: 1. [Classmethod, Inc.](https://classmethod.jp/) [[@shoito](https://github.com/shoito)] 1. [Cleartax](https://cleartax.in/) [[@anks](https://github.com/anks) & [@codebuff](https://github.com/codebuff)] 1. [Clicksign](https://clicksign.com/) [[@mbbernstein](https://github.com/mbbernstein) & [@jorgeac12](https://github.com/jorgeac12) & [@franklin390](https://github.com/franklin390)] -1. [Cloudera](https://www.cloudera.com/) [[@phraniiac](https://github.com/phraniiac) & [@VivekPemawat](https://github.com/VivekPemawat) & [@amoghrajesh](https://github.com/amoghrajesh) & [@vedantlodha](https://github.com/vedantlodha) & [@shubhamraj-git](https://github.com/shubhamraj-git) & [@Samit-Maharjan](https://github.com/Samit-Maharjan)] & [@anukrati1507](https://github.com/anukrati1507) +1. [Cloudera](https://www.cloudera.com/) [[@phraniiac](https://github.com/phraniiac) & [@VivekPemawat](https://github.com/VivekPemawat) & [@amoghrajesh](https://github.com/amoghrajesh) & [@vedantlodha](https://github.com/vedantlodha) & [@shubhamraj-git](https://github.com/shubhamraj-git) & [@Samit-Maharjan](https://github.com/Samit-Maharjan) & [@anukrati1507](https://github.com/anukrati1507)] 1. [Clover Health](https://www.cloverhealth.com) [[@ryansiu1995](https://github.com/ryansiu1995)] 1. [Coinbase](https://www.coinbase.com) [[@mingshi-wang](https://github.com/mingshi-wang)] 1. [Coinone](https://www.coinonecorp.com) [[@jx2lee](https://github.com/jx2lee)] @@ -176,16 +177,17 @@ Currently, **officially** using Airflow: 1. [dataroots](https://dataroots.io/) [[@datarootsio]](https://github.com/datarootsio) 1. [DataSprints](https://datasprints.com/) [[@lopesdiego12](https://github.com/lopesdiego12) & [@rafaelsantanaep](https://github.com/rafaelsantanaep)] 1. [Datatonic](https://datatonic.com/) [[@teamdatatonic](https://github.com/teamdatatonic)] -1. [Datavant](https://datavant.com)/) [@althati(https://github.com/althati)] +1. [Datavant](https://datavant.com) [[@althati](https://github.com/althati)] 1. [Datumo](https://datumo.io) [[@michalmisiewicz](https://github.com/michalmisiewicz)] 1. [Dcard](https://www.dcard.tw/) [[@damon09273](https://github.com/damon09273) & [@bruce3557](https://github.com/bruce3557) & [@kevin1kevin1k](http://github.com/kevin1kevin1k)] 1. [Delft University of Technology](https://www.tudelft.nl/en/) [[@saveriogzz](https://github.com/saveriogzz)] 1. [Dentsu Inc.](http://www.dentsu.com/) [[@bryan831](https://github.com/bryan831) & [@loozhengyuan](https://github.com/loozhengyuan)] -1. [Deseret Digital Media](http://deseretdigital.com/) [[@formigone](https://github.com/formigone) +1. [Deseret Digital Media](http://deseretdigital.com/) [[@formigone](https://github.com/formigone)] 1. [DevITJobs.com](https://devitjobs.com/) 1. [DFDS](https://www.dfds.com/) [[@timonviola](https://github.com/timonviola)] 1. 
[Digital First Media](http://www.digitalfirstmedia.com/) [[@duffn](https://github.com/duffn) & [@mschmo](https://github.com/mschmo) & [@seanmuth](https://github.com/seanmuth)] 1. [Disney](https://www.disney.com/) [[@coolbeans201](https://github.com/coolbeans201)] +1. [Docaposte](https://www.docaposte.com) [[@albundy83](https://github.com/albundy83)] 1. [Docsity](https://www.docsity.com/) 1. [Doctrine](https://www.doctrine.fr/)[[@anteverse](https://github.com/anteverse)] 1. [DoorDash](https://www.doordash.com/) [[@chiragtodarka](https://github.com/chiragtodarka)] @@ -195,7 +197,7 @@ Currently, **officially** using Airflow: 1. [Dunnhumby](https://www.dunnhumby.com) 1. [Dunzo](https://www.dunzo.com)[[@masterlittle](https://github.com/masterlittle)] 1. [Dynata](https://www.dynata.com) [[@neil3handari](https://github.com/neil3handari)] -1. [e-MPS](https://e-mps.co.uk/)[[@IanDanielM](https://github.com/IanDanielM) +1. [e-MPS](https://e-mps.co.uk/)[[@IanDanielM](https://github.com/IanDanielM)] 1. [Easy Taxi](http://www.easytaxi.com/) [[@caique-lima](https://github.com/caique-lima) & [@diraol](https://github.com/diraol)] 1. [EBANX](https://www.ebanx.com/) [[@diogodilcl](https://github.com/diogodilcl) & [@estevammr](https://github.com/estevammr) & [@filipe-banzoli](https://github.com/filipe-banzoli) & [@lara-clink](https://github.com/lara-clink) & [@Lucasdsvenancio](https://github.com/Lucasdsvenancio) & [@mariotaddeucci](https://github.com/mariotaddeucci) & [@nadiapetramont](https://github.com/nadiapetramont) & [@nathangngencissk](https://github.com/nathangngencissk) & [@patrickjuan](https://github.com/patrickjuan) & [@raafaadg](https://github.com/raafaadg) & [@samebanx](https://github.com/samebanx) & [@thiagoschonrock](https://github.com/thiagoschonrock) & [@whrocha](https://github.com/whrocha)] 1. [Elai Data](https://www.elaidata.com/) [[@lgov](https://github.com/lgov)] @@ -212,7 +214,7 @@ Currently, **officially** using Airflow: 1. [Etsy](https://www.etsy.com) [[@mchalek](https://github.com/mchalek)] 1. [EUIGS - Admiral Group](https://www.linkedin.com/company/euiitglobalservices) [[@emilioego](https://github.com/emilioego)] 1. [Europace](https://www.europace.de/) -1. [Europcar](https://www.europcar.com/en-us) [[@Conformist101](https://github.com/Conformist101) & [@davidpr91](https://github.com/davidpr91) & [@jcarbonell](https://github.com/jcarbonell)& [@marc-rf](https://github.com/marc-rf)& [@VictorGeaGarcia](https://github.com/VictorGeaGarcia)] +1. [Europcar](https://www.europcar.com/en-us) [[@Conformist101](https://github.com/Conformist101) & [@davidpr91](https://github.com/davidpr91) & [@jcarbonell](https://github.com/jcarbonell) & [@marc-rf](https://github.com/marc-rf) & [@VictorGeaGarcia](https://github.com/VictorGeaGarcia)] 1. [Everis](https://www.everis.com) [[@diegobenedicto](https://github.com/diegobenedicto)] 1. [Everlane](https://everlane.com) [[@NickBenthem](https://github.com/NickBenthem)] 1. [evo.company](https://evo.company/) [[@orhideous](https://github.com/orhideous)] @@ -293,7 +295,7 @@ Currently, **officially** using Airflow: 1. [Inoopa](https://www.inoopa.com/) [[@GraphtyLove](https://github.com/GraphtyLove)] 1. [Instacart 🥕](http://www.instacart.com/) [[@arp1t](https://github.com/arp1t) & [@code-sauce](https://github.com/code-sauce) & [@jasonlew](https://github.com/jasonlew) & [@j4p3](https://github.com/j4p3) & [@lubert](https://github.com/lubert) & [@mmontagna](https://github.com/mmontagna) & [@RyanAD](https://github.com/RyanAD) &[@zzadeh](https://github.com/zzadeh)] 1. 
[Intellischool 🎓](https://intellischool.co/) [[@intelliscl](https://github.com/intelliscl) & [@dave-philp](https://github.com/dave-philp)] -1. [Inter Platform Inc.](https://www.bancointer.com.br/) [[@wolvery](https://github.com/wolvery) +1. [Inter Platform Inc.](https://www.bancointer.com.br/) [[@wolvery](https://github.com/wolvery)] 1. [Intercom](http://www.intercom.com/) [[@fox](https://github.com/fox) & [@paulvic](https://github.com/paulvic)] 1. [Interia](http://www.interia.pl) 1. [Investorise](https://investorise.com/) [[@svenvarkel](https://github.com/svenvarkel)] @@ -513,7 +515,7 @@ Currently, **officially** using Airflow: 1. [Topgolf](https://topgolf.com/)[[@BhaveshSK](https://github.com/BhaveshSK)] 1. [Toplyne](https://toplyne.io)[[@Toplyne](https://github.com/Toplyne/)] 1. [Trade Republic](https://traderepublic.com/) -1. [Trakken](https://www.trkkn.com/) [[@itroulli](https://github.com/itroulli), [@gthar](https://github.com/gthar), [@qulo](https://github.com/qulo), [@Oscar-Rod](https://github.com/Oscar-Rod), [@kondla](https://github.com/kondla), [@semuar](https://github.com/semuar), [@ManuelFreytag](https://github.com/ManuelFreytag) +1. [Trakken](https://www.trkkn.com/) [[@itroulli](https://github.com/itroulli), [@gthar](https://github.com/gthar), [@qulo](https://github.com/qulo), [@Oscar-Rod](https://github.com/Oscar-Rod), [@kondla](https://github.com/kondla), [@semuar](https://github.com/semuar), [@ManuelFreytag](https://github.com/ManuelFreytag)] 1. [Travix](https://www.travix.com/) 1. [Trocafone](https://www.trocafone.com/) [[@idontdomath](https://github.com/idontdomath) & [@gseva](https://github.com/gseva) & [@ordonezf](https://github.com/ordonezf) & [@PalmaLeandro](https://github.com/PalmaLeandro)] 1. [TruFactor](https://trufactor.io/) [[@gholmes](https://github.com/gholmes) & [@angadsingh](https://github.com/angadsingh/)] diff --git a/airflow-core/docs/best-practices.rst b/airflow-core/docs/best-practices.rst index 28c3285339ac2..1383ce9a3a53c 100644 --- a/airflow-core/docs/best-practices.rst +++ b/airflow-core/docs/best-practices.rst @@ -1010,7 +1010,7 @@ There are certain limitations and overhead introduced by this operator: same worker might be affected by previous tasks creating/modifying files etc. You can see detailed examples of using :class:`airflow.providers.standard.operators.python.PythonVirtualenvOperator` in -:ref:`this section in the Taskflow API tutorial `. +:ref:`this section in the TaskFlow API tutorial `. Using ExternalPythonOperator @@ -1078,7 +1078,7 @@ The nice thing about this is that you can switch the decorator back at any time developing it "dynamically" with ``PythonVirtualenvOperator``. You can see detailed examples of using :class:`airflow.providers.standard.operators.python.ExternalPythonOperator` in -:ref:`Taskflow External Python example ` +:ref:`TaskFlow External Python example ` Using DockerOperator or Kubernetes Pod Operator ----------------------------------------------- @@ -1142,9 +1142,9 @@ The drawbacks: containers etc. in order to author a DAG that uses those operators. 
You can see detailed examples of using :class:`airflow.operators.providers.Docker` in
-:ref:`Taskflow Docker example `
+:ref:`TaskFlow Docker example `
 and :class:`airflow.providers.cncf.kubernetes.operators.pod.KubernetesPodOperator`
-:ref:`Taskflow Kubernetes example `
+:ref:`TaskFlow Kubernetes example `
 
 Using multiple Docker Images and Celery Queues
 ----------------------------------------------
diff --git a/airflow-core/docs/conf.py b/airflow-core/docs/conf.py
index a15fd4b8ad391..c9886a172b328 100644
--- a/airflow-core/docs/conf.py
+++ b/airflow-core/docs/conf.py
@@ -120,7 +120,6 @@ PACKAGES_THAT_WE_SHOULD_ADD_TO_API_DOCS = {
     "hooks",
-    "decorators",
     "example_dags",
     "executors",
     "operators",
@@ -140,15 +139,7 @@ MODELS_THAT_SHOULD_BE_INCLUDED_IN_API_DOCS: set[str] = {
     "baseoperator.py",
-    "connection.py",
-    "dag.py",
-    "dagrun.py",
-    "dagbag.py",
     "param.py",
-    "taskinstance.py",
-    "taskinstancekey.py",
-    "variable.py",
-    "xcom.py",
 }
diff --git a/airflow-core/docs/core-concepts/auth-manager/index.rst b/airflow-core/docs/core-concepts/auth-manager/index.rst
index 8af0d249a9e6e..b0c99abf4bf54 100644
--- a/airflow-core/docs/core-concepts/auth-manager/index.rst
+++ b/airflow-core/docs/core-concepts/auth-manager/index.rst
@@ -91,14 +91,28 @@ Some reasons you may want to write a custom auth manager include:
 * You'd like to use an auth manager that leverages an identity provider from your preferred cloud provider.
 * You have a private user management tool that is only available to you or your organization.
 
-Authentication related BaseAuthManager methods
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+User representation
+^^^^^^^^^^^^^^^^^^^
+
+:class:`~airflow.api_fastapi.auth.managers.base_auth_manager.BaseAuthManager` defines an authentication manager,
+parameterized by a user class ``T`` representing the authenticated user type.
+Auth manager implementations (subclasses of :class:`~airflow.api_fastapi.auth.managers.base_auth_manager.BaseAuthManager`)
+should specify the associated concrete user type. Each auth manager has its own user type definition.
+Concrete user types should be subclasses of :class:`~airflow.api_fastapi.auth.managers.models.base_user.BaseUser`.
+
+Authentication related methods
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-* ``get_user``: Return the signed-in user.
 * ``get_url_login``: Return the URL the user is redirected to for signing in.
+* ``get_url_logout``: Return the URL the user is redirected to when logging out. This method is optional;
+  the redirection is usually needed to invalidate resources, such as a session, when logging out.
+* ``serialize_user``: Serialize a user instance to a dict. This dict is the actual content of the JWT token.
+  It should contain all the information needed to identify the user and make an authorization request.
+* ``deserialize_user``: Create a user instance from a dict. The dict is the payload of the JWT token.
+  This is the same dict returned by ``serialize_user``.
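+
+A minimal sketch of this user parameterization and the (de)serialization methods above
+(the class names and token fields are illustrative, the ``BaseUser`` interface is simplified,
+and the remaining abstract methods are omitted):
+
+.. code-block:: python
+
+    from airflow.api_fastapi.auth.managers.base_auth_manager import BaseAuthManager
+    from airflow.api_fastapi.auth.managers.models.base_user import BaseUser
+
+
+    class MyUser(BaseUser):
+        def __init__(self, user_id: str, name: str) -> None:
+            self.user_id = user_id
+            self.name = name
+
+        def get_id(self) -> str:
+            return self.user_id
+
+        def get_name(self) -> str:
+            return self.name
+
+
+    class MyAuthManager(BaseAuthManager[MyUser]):
+        def serialize_user(self, user: MyUser) -> dict:
+            # This dict becomes the JWT payload: include everything needed to
+            # re-identify the user and answer authorization requests.
+            return {"user_id": user.get_id(), "name": user.get_name()}
+
+        def deserialize_user(self, token: dict) -> MyUser:
+            # Receives the same dict produced by serialize_user.
+            return MyUser(user_id=token["user_id"], name=token["name"])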
 
-Authorization related BaseAuthManager methods
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Authorization related methods
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 Most of authorization methods in :class:`~airflow.api_fastapi.auth.managers.base_auth_manager.BaseAuthManager` look the same.
 Let's go over the different parameters used by most of these methods.
@@ -122,17 +136,21 @@ These authorization methods are:
 
 Also, ``is_authorized_dag`` is called for any entity related to dags (e.g. task instances, dag runs, ...). This information is passed in ``access_entity``.
 Example: ``auth_manager.is_authorized_dag(method="GET", access_entity=DagAccessEntity.Run, details=DagDetails(id="dag-1"))`` asks
 whether the user has permission to read the Dag runs of the dag "dag-1".
 
-* ``is_authorized_dataset``: Return whether the user is authorized to access Airflow datasets. Some details about the dataset can be provided (e.g. the dataset uri).
+* ``is_authorized_backfill``: Return whether the user is authorized to access Airflow backfills. Some details about the backfill can be provided (e.g. the backfill ID).
+* ``is_authorized_asset``: Return whether the user is authorized to access Airflow assets. Some details about the asset can be provided (e.g. the asset ID).
+* ``is_authorized_asset_alias``: Return whether the user is authorized to access Airflow asset aliases. Some details about the asset alias can be provided (e.g. the asset alias ID).
 * ``is_authorized_pool``: Return whether the user is authorized to access Airflow pools. Some details about the pool can be provided (e.g. the pool name).
 * ``is_authorized_variable``: Return whether the user is authorized to access Airflow variables. Some details about the variable can be provided (e.g. the variable key).
 * ``is_authorized_view``: Return whether the user is authorized to access a specific view in Airflow. The view is specified through ``access_view`` (e.g. ``AccessView.CLUSTER_ACTIVITY``).
 * ``is_authorized_custom_view``: Return whether the user is authorized to access a specific view not defined in Airflow. This view can be provided by the auth manager itself or a plugin defined by the user.
+* ``filter_authorized_menu_items``: Given the list of menu items in the UI, return the list of menu items the user has access to.
 
 JWT token management by auth managers
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 The auth manager is responsible for creating the JWT token needed to interact with Airflow public API.
-To achieve this, the auth manager **must** provide an endpoint to create this JWT token. This endpoint must be
-available at ``POST /auth/token``
+To achieve this, the auth manager **must** provide an endpoint to create this JWT token. This endpoint is usually
+available at ``POST /auth/token``.
+Please double-check the auth manager documentation to find the exact token generation endpoint.
 
 The auth manager is also responsible of passing the JWT token to Airflow UI. The protocol to exchange the JWT
 token between the auth manager and Airflow UI is using cookies. The auth manager needs to save the JWT token in a
@@ -159,9 +177,6 @@ Optional methods recommended to override for optimization
 
 The following methods aren't required to override to have a functional Airflow auth manager. However, it is
 recommended to override these to make your auth manager faster (and potentially less costly):
 
 * ``batch_is_authorized_dag``: Batch version of ``is_authorized_dag``. If not overridden, it will call ``is_authorized_dag`` for every single item.
-* ``batch_is_authorized_connection``: Batch version of ``is_authorized_connection``. If not overridden, it will call ``is_authorized_connection`` for every single item.
-* ``batch_is_authorized_pool``: Batch version of ``is_authorized_pool``. If not overridden, it will call ``is_authorized_pool`` for every single item.
-* ``batch_is_authorized_variable``: Batch version of ``is_authorized_variable``. If not overridden, it will call ``is_authorized_variable`` for every single item.
 * ``get_authorized_dag_ids``: Return the list of DAG IDs the user has access to.
   If not overridden, it will call ``is_authorized_dag`` for every single DAG available in the environment.
 
 CLI
@@ -204,6 +219,16 @@ To extend the API server application, you need to implement the ``get_fastapi_ap
 Such additional endpoints can be used to manage resources such as users, groups, roles (if any) handled by
 your auth manager. Endpoints defined by ``get_fastapi_app`` are mounted in ``/auth``.
 
+Other optional methods
+^^^^^^^^^^^^^^^^^^^^^^
+
+* ``init``: This method is executed when Airflow is initializing.
+  Override this method if you need to perform any action (e.g. creating resources or making API calls) that the auth manager requires.
+* ``get_extra_menu_items``: Provide additional links to be added to the menu in the UI.
+* ``get_db_manager``: If your auth manager requires one or several database managers (see :class:`~airflow.utils.db_manager.BaseDBManager`),
+  their class paths need to be returned as part of this method. By doing so, they will be automatically added to the
+  config ``[database] external_db_managers``.
+
 Next Steps
 ----------
diff --git a/airflow-core/docs/core-concepts/params.rst b/airflow-core/docs/core-concepts/params.rst
index 805da212d352a..f6d8a2c5c7a89 100644
--- a/airflow-core/docs/core-concepts/params.rst
+++ b/airflow-core/docs/core-concepts/params.rst
@@ -32,7 +32,7 @@ If the user-supplied values don't pass validation, Airflow shows a warning inste
 DAG-level Params
 ----------------
 
-To add Params to a :class:`~airflow.models.dag.DAG`, initialize it with the ``params`` kwarg.
+To add Params to a :class:`~airflow.sdk.DAG`, initialize it with the ``params`` kwarg.
 Use a dictionary that maps Param names to either a :class:`~airflow.sdk.definitions.param.Param` or an object indicating the parameter's default value.
 
 .. code-block::
diff --git a/airflow-core/docs/core-concepts/variables.rst b/airflow-core/docs/core-concepts/variables.rst
index db0ffacb0884f..6487fd0c131a3 100644
--- a/airflow-core/docs/core-concepts/variables.rst
+++ b/airflow-core/docs/core-concepts/variables.rst
@@ -33,6 +33,20 @@ To use them, just import and call ``get`` on the Variable model::
     # Returns the value of default (None) if the variable is not set
     baz = Variable.get("baz", default=None)
 
+You can also access variables through the Task Context using
+:func:`~airflow.sdk.get_current_context`:
+
+.. code-block:: python
+
+    from airflow.sdk import get_current_context
+
+
+    def my_task():
+        context = get_current_context()
+        var = context["var"]
+        my_variable = var.get("my_variable_name")
+        return my_variable
+
 You can also use them from :ref:`templates `::
 
     # Raw value
diff --git a/airflow-core/docs/core-concepts/xcoms.rst b/airflow-core/docs/core-concepts/xcoms.rst
index 2be9b75bbf849..93463a752768e 100644
--- a/airflow-core/docs/core-concepts/xcoms.rst
+++ b/airflow-core/docs/core-concepts/xcoms.rst
@@ -25,6 +25,9 @@ XComs (short for "cross-communications") are a mechanism that let :doc:`tasks` t
 An XCom is identified by a ``key`` (essentially its name), as well as the ``task_id`` and ``dag_id`` it came from. They can have any serializable value (including objects that are decorated with ``@dataclass`` or ``@attr.define``, see :ref:`TaskFlow arguments `:), but they are only designed for small amounts of data; do not use them to pass around large values, like dataframes.
 
+XCom operations should be performed through the Task Context using
+:func:`~airflow.sdk.get_current_context`. Directly updating XComs through the database model is not possible.
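+
+For example, a task can read another task's XCom through the task instance in the
+context (a minimal sketch; the task ID and key below are illustrative):
+
+.. code-block:: python
+
+    from airflow.sdk import get_current_context
+
+
+    def my_task():
+        context = get_current_context()
+        ti = context["ti"]
+        # Pull the value that another task pushed under an explicit key
+        return ti.xcom_pull(task_ids="push_task", key="my_key")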
+ XComs are explicitly "pushed" and "pulled" to/from their storage using the ``xcom_push`` and ``xcom_pull`` methods on Task Instances. To push a value within a task called **"task-1"** that will be used by another task: @@ -73,8 +76,6 @@ An example of pushing multiple XComs and pulling them individually: # Pulling entire xcom data from push_multiple task data = context["ti"].xcom_pull(task_ids="push_multiple", key="return_value") - - .. note:: If the first task run is not succeeded then on every retry task XComs will be cleared to make the task run idempotent. @@ -91,7 +92,7 @@ Custom XCom Backends The XCom system has interchangeable backends, and you can set which backend is being used via the ``xcom_backend`` configuration option. -If you want to implement your own backend, you should subclass :class:`~airflow.models.xcom.BaseXCom`, and override the ``serialize_value`` and ``deserialize_value`` methods. +If you want to implement your own backend, you should subclass :class:`~airflow.sdk.bases.xcom.BaseXCom`, and override the ``serialize_value`` and ``deserialize_value`` methods. You can override the ``purge`` method in the ``BaseXCom`` class to have control over purging the xcom data from the custom backend. This will be called as part of ``delete``. @@ -104,6 +105,6 @@ If you can exec into a terminal in an Airflow container, you can then print out .. code-block:: python - from airflow.models.xcom import XCom + from airflow.sdk.execution_time.xcom import XCom print(XCom.__name__) diff --git a/airflow-core/docs/howto/connection.rst b/airflow-core/docs/howto/connection.rst index 84aa1648b8224..e58d0260db49b 100644 --- a/airflow-core/docs/howto/connection.rst +++ b/airflow-core/docs/howto/connection.rst @@ -22,7 +22,7 @@ Managing Connections For an overview of hooks and connections, see :doc:`/authoring-and-scheduling/connections`. -Airflow's :class:`~airflow.models.connection.Connection` object is used for storing credentials and other information necessary for connecting to external services. +Airflow's :class:`~airflow.sdk.Connection` object is used for storing credentials and other information necessary for connecting to external services. Connections may be defined in the following ways: @@ -77,7 +77,7 @@ convenience property :py:meth:`~airflow.models.connection.Connection.as_json`. I .. code-block:: pycon - >>> from airflow.models.connection import Connection + >>> from airflow.sdk import Connection >>> c = Connection( ... conn_id="some_conn", ... conn_type="mysql", @@ -94,7 +94,7 @@ In addition, same approach could be used to convert Connection from URI format t .. code-block:: pycon - >>> from airflow.models.connection import Connection + >>> from airflow.sdk import Connection >>> c = Connection( ... conn_id="awesome_conn", ... description="Example Connection", diff --git a/airflow-core/docs/howto/custom-operator.rst b/airflow-core/docs/howto/custom-operator.rst index b76a2277fbfea..d6206166e1211 100644 --- a/airflow-core/docs/howto/custom-operator.rst +++ b/airflow-core/docs/howto/custom-operator.rst @@ -24,7 +24,7 @@ Creating a custom Operator Airflow allows you to create new operators to suit the requirements of you or your team. This extensibility is one of the many features which make Apache Airflow powerful. -You can create any operator you want by extending the :class:`airflow.models.baseoperator.BaseOperator` +You can create any operator you want by extending the public SDK base class :class:`~airflow.sdk.BaseOperator`. 
There are two methods that you need to override in a derived class:
diff --git a/airflow-core/docs/howto/docker-compose/docker-compose.yaml b/airflow-core/docs/howto/docker-compose/docker-compose.yaml
index 1c298fc518865..2c2a614c9ef72 100644
--- a/airflow-core/docs/howto/docker-compose/docker-compose.yaml
+++ b/airflow-core/docs/howto/docker-compose/docker-compose.yaml
@@ -217,7 +217,7 @@ services:
           echo "For other operating systems you can get rid of the warning with manually created .env file:"
           echo "  See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user"
           echo
-          export AIRFLOW_UID=$(id -u)
+          export AIRFLOW_UID=$$(id -u)
         fi
         one_meg=1048576
         mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg))
diff --git a/airflow-core/docs/howto/email-config.rst b/airflow-core/docs/howto/email-config.rst
index 532f987aca758..c3373447654d7 100644
--- a/airflow-core/docs/howto/email-config.rst
+++ b/airflow-core/docs/howto/email-config.rst
@@ -184,6 +184,14 @@ Follow the steps below to enable it:
       email_conn_id = aws_default
       from_email = From email
 
+   Equivalent environment variables look like:
+
+   .. code-block::
+
+      AIRFLOW__EMAIL__EMAIL_BACKEND=airflow.providers.amazon.aws.utils.emailer.send_email
+      AIRFLOW__EMAIL__EMAIL_CONN_ID=aws_default
+      AIRFLOW__EMAIL__FROM_EMAIL=email@example.com
+
    Note that for SES, you must configure from_email to the valid email that can send messages from SES.
 
 3. Create a connection called ``aws_default``, or choose a custom connection
diff --git a/airflow-core/docs/img/airflow_erd.sha256 b/airflow-core/docs/img/airflow_erd.sha256
index bc2221290c43f..e935d2a08ab18 100644
--- a/airflow-core/docs/img/airflow_erd.sha256
+++ b/airflow-core/docs/img/airflow_erd.sha256
@@ -1 +1 @@
-a8edcc8b63444f327ddea859988c23c87a1a722e2b6a430734e61518777d3e61
\ No newline at end of file
+2e49ab99fe1076b0f3f22a52b9ee37eeb7fc20a5a043ea504cc26022f4315277
\ No newline at end of file
diff --git a/airflow-core/docs/img/airflow_erd.svg b/airflow-core/docs/img/airflow_erd.svg
index a7bacd714f8c7..2f9f9b4becc5e 100644
--- a/airflow-core/docs/img/airflow_erd.svg
+++ b/airflow-core/docs/img/airflow_erd.svg
[The remainder of this diff is the regenerated, auto-generated ERD SVG markup (element coordinates and styling), omitted here. The substantive schema change visible in it is that task_instance.dag_version_id is now NOT NULL.]
diff --git a/airflow-core/docs/migrations-ref.rst b/airflow-core/docs/migrations-ref.rst index e75af75b23bc5..0e18989fbc8a7 100644 --- a/airflow-core/docs/migrations-ref.rst +++ b/airflow-core/docs/migrations-ref.rst
@@ -39,7 +39,13 @@ Here's the list of all the Database Migrations that are executed via when you ru
+-------------------------+------------------+-------------------+--------------------------------------------------------------+
| Revision ID | Revises ID | Airflow Version | Description |
+=========================+==================+===================+==============================================================+
-| ``66a7743fe20e`` (head) | ``583e80dfcef4`` | ``3.1.0`` | Add triggering user to dag_run. |
+| ``40f7c30a228b`` (head) | ``5d3072c51bac`` | ``3.1.0`` | Add Human In the Loop Detail table. |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``5d3072c51bac`` | ``ffdb0566c7c0`` | ``3.1.0`` | Make dag_version_id non-nullable in TaskInstance. |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``ffdb0566c7c0`` | ``66a7743fe20e`` | ``3.1.0`` | Add dag_favorite table. |
++-------------------------+------------------+-------------------+--------------------------------------------------------------+
+| ``66a7743fe20e`` | ``583e80dfcef4`` | ``3.1.0`` | Add triggering user to dag_run. |
+-------------------------+------------------+-------------------+--------------------------------------------------------------+
| ``583e80dfcef4`` | ``3ac9e5732b1f`` | ``3.1.0`` | Add task_inlet_asset_reference table.
| +-------------------------+------------------+-------------------+--------------------------------------------------------------+
diff --git a/airflow-core/docs/public-airflow-interface.rst b/airflow-core/docs/public-airflow-interface.rst index 0d74912cc46db..0f685c16ccea4 100644 --- a/airflow-core/docs/public-airflow-interface.rst +++ b/airflow-core/docs/public-airflow-interface.rst
@@ -15,6 +15,17 @@ specific language governing permissions and limitations under the License. +**PUBLIC INTERFACE FOR AIRFLOW 3.0+** +===================================== + +.. warning:: + + **This documentation covers the Public Interface for Airflow 3.0+** + + If you are using Airflow 2.x, please refer to the + `Airflow 2.11 Public Interface Documentation `_ + for the legacy interface. + Public Interface of Airflow ...........................
@@ -25,9 +36,32 @@ and extending Airflow capabilities by writing new executors, plugins, operators Public Interface can be useful for building custom tools and integrations with other systems, and for automating certain aspects of the Airflow workflow. +The Airflow Task SDK, exposed through the :doc:`airflow.sdk namespace `, is the primary public interface for DAG Authors and for task execution. +Direct access to the metadata database from task code is no longer allowed. Instead, use the :doc:`Stable REST API `, +`Python Client `_, or Task Context methods. + +For comprehensive Task SDK documentation, see the `Task SDK Reference `_. Using Airflow Public Interfaces =============================== +.. note:: + + As of **Airflow 3.0**, users should use the ``airflow.sdk`` namespace as the official **Public Interface**, as defined in `AIP-72 `_. + + Direct interaction with internal modules or the metadata database is no longer supported. + For stable, production-safe integration, it is recommended to use: + + - The official **REST API** + - The **Python Client SDK** (`airflow-client-python`) + - The new **Task SDK** (``airflow.sdk``) + + Related docs: + - `Release Notes 3.0 `_ + - `Task SDK Overview `_ The following are some examples of the public interface of Airflow: * When you are writing your own operators or hooks. This is commonly done when no hook or operator exists for your use case, or perhaps when one exists but you need to customize the behavior.
@@ -56,13 +90,65 @@ way, the Stable REST API is recommended. Using the Public Interface for DAG Authors ========================================== +The primary interface for DAG Authors is the :doc:`airflow.sdk namespace `. +This provides a stable, well-defined interface for creating DAGs and tasks that is not subject to internal +implementation changes. The goal of this change is to decouple DAG authoring from Airflow internals (Scheduler, +API Server, etc.), providing a version-agnostic, stable interface for writing and maintaining DAGs across Airflow versions.
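To make the decoupling concrete, a short, hedged sketch of how imports change (the Airflow 2.x paths in the comments are the legacy equivalents; the full list of public symbols follows):

.. code-block:: python

    # Airflow 3.x: author DAGs against the stable SDK namespace only.
    from airflow.sdk import DAG, Connection, Variable, task

    # Legacy Airflow 2.x equivalents (deprecated):
    #   from airflow import DAG
    #   from airflow.decorators import task
    #   from airflow.models import Connection, Variable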
+ +**Key Imports from airflow.sdk:** + +**Classes:** + +* ``Asset`` +* ``BaseHook`` +* ``BaseNotifier`` +* ``BaseOperator`` +* ``BaseOperatorLink`` +* ``BaseSensorOperator`` +* ``Connection`` +* ``Context`` +* ``DAG`` +* ``EdgeModifier`` +* ``Label`` +* ``ObjectStoragePath`` +* ``Param`` +* ``TaskGroup`` +* ``Variable`` + +**Decorators and Functions:** + +* ``@asset`` +* ``@dag`` +* ``@setup`` +* ``@task`` +* ``@task_group`` +* ``@teardown`` +* ``chain`` +* ``chain_linear`` +* ``cross_downstream`` +* ``get_current_context`` +* ``get_parsing_context`` + +**Migration from Airflow 2.x:** + +For detailed migration instructions from Airflow 2.x to 3.x, including import changes and other breaking changes, +see the :doc:`Migration Guide `. + +For an exhaustive list of available classes, decorators, and functions, check ``airflow.sdk.__all__``. + +All DAGs should update imports to use ``airflow.sdk`` instead of referencing internal Airflow modules directly. +Legacy import paths (e.g., ``airflow.models.dag.DAG``, ``airflow.decorators.task``) are deprecated and will be +removed in a future Airflow version.
Dags ----- +==== The DAG is Airflow's core entity that represents a recurring workflow. You can create a DAG by -instantiating the :class:`~airflow.models.dag.DAG` class in your DAG file. You can also instantiate -them via :class:`~airflow.models.dagbag.DagBag` class that reads dags from a file or a folder. Dags -can also have parameters specified via :class:`~airflow.sdk.definitions.param.Param` class. +instantiating the :class:`~airflow.sdk.DAG` class in your DAG file. Dags can also have parameters +specified via the :class:`~airflow.sdk.Param` class. + +The recommended way to create DAGs is using the :func:`~airflow.sdk.dag` decorator +from the airflow.sdk namespace. Airflow has a set of example dags that you can use to learn how to write dags You can read more about dags in :doc:`Dags `. References for the modules used in dags are here: -.. toctree:: - :includehidden: - :glob: - :maxdepth: 1 +.. note:: + The airflow.sdk namespace provides the primary interface for DAG Authors. + For detailed API documentation, see the `Task SDK Reference `_. - _api/airflow/models/dag/index - _api/airflow/models/dagbag/index +.. note:: + The :class:`~airflow.models.dagbag.DagBag` class is used internally by Airflow for loading DAGs + from files and folders. DAG Authors should use the :class:`~airflow.sdk.DAG` class from the + airflow.sdk namespace instead. -Properties of a :class:`~airflow.models.dagrun.DagRun` can also be referenced in things like :ref:`Templates `. - -.. toctree:: - :includehidden: - :glob: - :maxdepth: 1 - - _api/airflow/models/dagrun/index +.. note:: + The :class:`~airflow.models.dagrun.DagRun` class is used internally by Airflow for DAG run + management. DAG Authors should access DAG run information through the Task Context via + :func:`~airflow.sdk.get_current_context` or use the :class:`~airflow.sdk.types.DagRunProtocol` + interface. .. _pythonapi:operators: Operators ---------- +========= + +The base classes :class:`~airflow.sdk.BaseOperator` and :class:`~airflow.sdk.BaseSensorOperator` are public and may be extended to make new operators. -The base classes :class:`~airflow.models.baseoperator.BaseOperator` and :class:`~airflow.sensors.base.BaseSensorOperator` are public and may be extended to make new operators. +Both base classes are importable directly from the ``airflow.sdk`` namespace.
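For illustration, a hedged sketch of a custom sensor built on :class:`~airflow.sdk.BaseSensorOperator` (the class name and polling condition are invented for this example):

.. code-block:: python

    import os

    from airflow.sdk import BaseSensorOperator


    class FileDropSensor(BaseSensorOperator):
        """Illustrative sensor that waits until a local directory is non-empty."""

        def __init__(self, path: str, **kwargs) -> None:
            super().__init__(**kwargs)
            self.path = path

        def poke(self, context) -> bool:
            # poke() is called repeatedly on the sensor's poke_interval
            # until it returns True (or the sensor times out).
            return os.path.isdir(self.path) and bool(os.listdir(self.path))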
Subclasses of BaseOperator which are published in Apache Airflow are public in *behavior* but not in *structure*. That is to say, the Operator's parameters and behavior is governed by semver but the methods are subject to change at any time. Task Instances --------------- - -Task instances are the individual runs of a single task in a DAG (in a DAG Run). They are available in the context -passed to the execute method of the operators via the :class:`~airflow.models.taskinstance.TaskInstance` class. - -.. toctree:: - :includehidden: - :glob: - :maxdepth: 1 +============== - _api/airflow/models/taskinstance/index +Task instances are the individual runs of a single task in a DAG (in a DAG Run). Task instances are accessed through +the Task Context via :func:`~airflow.sdk.get_current_context`. Direct database access is not possible. +.. note:: + Task Context is part of the airflow.sdk namespace. + For detailed API documentation, see the `Task SDK Reference `_. Task Instance Keys ------------------- +================== Task instance keys are unique identifiers of task instances in a DAG (in a DAG Run). A key is a tuple that consists of -``dag_id``, ``task_id``, ``run_id``, ``try_number``, and ``map_index``. The key of a task instance can be retrieved via -:meth:`~airflow.models.taskinstance.TaskInstance.key`. +``dag_id``, ``task_id``, ``run_id``, ``try_number``, and ``map_index``. -.. toctree:: - :includehidden: - :glob: - :maxdepth: 1 +Direct access to task instance keys via the :class:`~airflow.models.taskinstance.TaskInstance` +model is no longer allowed from task code. Instead, use the Task Context via :func:`~airflow.sdk.get_current_context` +to access task instance information. + +Example of accessing task instance information through Task Context: + +.. code-block:: python + + from airflow.sdk import get_current_context + + + def my_task(): + context = get_current_context() + ti = context["ti"] + + dag_id = ti.dag_id + task_id = ti.task_id + run_id = ti.run_id + try_number = ti.try_number + map_index = ti.map_index + + print(f"Task: {dag_id}.{task_id}, Run: {run_id}, Try: {try_number}, Map Index: {map_index}") + +.. note:: + The :class:`~airflow.models.taskinstancekey.TaskInstanceKey` class is used internally by Airflow + for identifying task instances. DAG Authors should access task instance information through the + Task Context via :func:`~airflow.sdk.get_current_context` instead. - _api/airflow/models/taskinstancekey/index .. _pythonapi:hooks: Hooks ------ +===== Hooks are interfaces to external platforms and databases, implementing a common interface when possible and acting as building blocks for operators. All hooks @@ -151,14 +255,44 @@ by extending them: _api/airflow/hooks/index Public Airflow utilities ------------------------- +======================== -When writing or extending Hooks and Operators, DAG authors and developers can +When writing or extending Hooks and Operators, DAG Authors and developers can use the following classes: -* The :class:`~airflow.models.connection.Connection`, which provides access to external service credentials and configuration. -* The :class:`~airflow.models.variable.Variable`, which provides access to Airflow configuration variables. -* The :class:`~airflow.models.xcom.XCom` which are used to access to inter-task communication data. +* The :class:`~airflow.sdk.Connection`, which provides access to external service credentials and configuration. +* The :class:`~airflow.sdk.Variable`, which provides access to Airflow configuration variables. 
+* The :class:`~airflow.sdk.execution_time.xcom.XCom` which is used to access inter-task communication data. + +Connection and Variable operations should be performed through the Task Context using +:func:`~airflow.sdk.get_current_context` and the task instance's methods, or through the airflow.sdk namespace. +Direct database access to :class:`~airflow.models.connection.Connection` and :class:`~airflow.models.variable.Variable` +models is no longer allowed from task code. + +Example of accessing Connections and Variables through Task Context: + +.. code-block:: python + + from airflow.sdk import get_current_context + + + def my_task(): + context = get_current_context() + + conn = context["conn"] + my_connection = conn.get("my_connection_id") + + var = context["var"] + my_variable = var.value.get("my_variable_name") + +Example of using airflow.sdk namespace directly: + +.. code-block:: python + + from airflow.sdk import Connection, Variable + + conn = Connection.get("my_connection_id") + var = Variable.get("my_variable_name") You can read more about the public Airflow utilities in :doc:`howto/connection`, :doc:`core-concepts/variables`, :doc:`core-concepts/xcoms`
@@ -166,18 +300,13 @@ You can read more about the public Airflow utilities in :doc:`howto/connection`, References for the classes used by these utilities are here: -.. toctree:: - :includehidden: - :glob: - :maxdepth: 1 - - _api/airflow/models/connection/index - _api/airflow/models/variable/index - _api/airflow/models/xcom/index +.. note:: + Connection, Variable, and XCom classes are now part of the airflow.sdk namespace. + For detailed API documentation, see the `Task SDK Reference `_. Public Exceptions ------------------ +================= When writing custom Operators and Hooks, you can handle and raise public Exceptions that Airflow exposes:
@@ -190,7 +319,7 @@ exposes: _api/airflow/exceptions/index Public Utility classes ----------------------- +====================== .. toctree:: :includehidden: :glob: :maxdepth: 1
@@ -214,7 +343,7 @@ that do not require plugins - you can read more about them in :doc:`howto/custom Here are the ways Plugins can be used to extend Airflow: Triggers --------- +======== Airflow uses Triggers to implement ``asyncio`` compatible Deferrable Operators. All Triggers derive from :class:`~airflow.triggers.base.BaseTrigger`.
@@ -232,7 +361,7 @@ by extending them: You can read more about Triggers in :doc:`authoring-and-scheduling/deferring`. Timetables ----------- +========== Custom timetable implementations provide Airflow's scheduler additional logic to schedule DAG runs in ways not possible with built-in schedule expressions.
@@ -250,7 +379,7 @@ by extending them: You can read more about Timetables in :doc:`howto/timetable`. Listeners ---------- +========= Listeners enable you to respond to DAG/Task lifecycle events.
@@ -263,11 +392,8 @@ can be implemented to respond to DAG/Task lifecycle events. You can read more about Listeners in :doc:`administration-and-deployment/listeners`. -.. - TODO AIP-72: This class has been moved to task sdk but we cannot add a doc reference for it yet because task sdk doesn't have rendered docs yet. - Extra Links ------------ +=========== Extra links are dynamic links that can be added to Airflow independently from custom Operators. Normally they can be defined by the Operators, but plugins allow you to override the links on a global level.
@@ -284,7 +410,7 @@ You can read more about providers :doc:`providers `.
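As a sketch of the Extra Links mechanism described above (the class name and URL are illustrative; ``BaseOperatorLink`` is among the public ``airflow.sdk`` classes listed earlier):

.. code-block:: python

    from airflow.sdk import BaseOperatorLink


    class DashboardLink(BaseOperatorLink):
        """Illustrative extra link pointing at an external dashboard."""

        name = "Monitoring Dashboard"  # label shown in the Airflow UI

        def get_link(self, operator, *, ti_key):
            # Build a per-task-instance URL from the task instance key.
            return f"https://dashboard.example.com/{ti_key.dag_id}/{ti_key.task_id}/{ti_key.run_id}"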
Executors ---------- +========= Executors are the mechanism by which task instances get run. All executors are derived from :class:`~airflow.executors.base_executor.BaseExecutor`. There are several
@@ -304,10 +430,10 @@ You can read more about executors and how to write your own in :doc:`core-concep executors, and custom executors could not provide full functionality that built-in executors had. Secrets Backends ----------------- +================ Airflow can be configured to rely on secrets backends to retrieve -:class:`~airflow.models.connection.Connection` and :class:`~airflow.models.variable.Variable`. +:class:`~airflow.sdk.Connection` and :class:`~airflow.sdk.Variable`. All secrets backends derive from :class:`~airflow.secrets.base_secrets.BaseSecretsBackend`. All Secrets Backend implementations are public. You can extend their functionality:
@@ -324,7 +450,7 @@ You can also find all the available Secrets Backends implemented in community pr in :doc:`apache-airflow-providers:core-extensions/secrets-backends`. Auth managers -------------- +============= Auth managers are responsible for user authentication and user authorization in Airflow. All auth managers are derived from :class:`~airflow.api_fastapi.auth.managers.base_auth_manager.BaseAuthManager`.
@@ -335,21 +461,21 @@ public, but the different implementations of auth managers are not (i.e. FabAuth You can read more about auth managers and how to write your own in :doc:`core-concepts/auth-manager/index`. Connections ------------ +=========== When creating Hooks, you can add custom Connections. You can read more about connections in :doc:`apache-airflow-providers:core-extensions/connections` for available Connections implemented in the community providers. Extra Links ------------ +=========== When creating Hooks, you can add custom Extra Links that are displayed when the tasks are run. You can find out more about extra links in :doc:`apache-airflow-providers:core-extensions/extra-links` that also shows available extra links implemented in the community providers. Logging and Monitoring ----------------------- +====================== You can extend how logs are written by Airflow. You can find out more about log writing in :doc:`administration-and-deployment/logging-monitoring/index`.
@@ -358,40 +484,44 @@ The :doc:`apache-airflow-providers:core-extensions/logging` that also shows avai implemented in the community providers. Decorators ----------- -DAG authors can use decorators to author dags using the :doc:`TaskFlow ` concept. -All Decorators derive from :class:`~airflow.decorators.base.TaskDecorator`. +========== +DAG Authors can use decorators to author dags using the :doc:`TaskFlow ` concept. +All Decorators derive from :class:`~airflow.sdk.bases.decorator.TaskDecorator`. + +The primary decorators for DAG Authors are now in the airflow.sdk namespace: +:func:`~airflow.sdk.dag`, :func:`~airflow.sdk.task`, :func:`~airflow.sdk.asset`, +:func:`~airflow.sdk.setup`, :func:`~airflow.sdk.task_group`, :func:`~airflow.sdk.teardown`, +:func:`~airflow.sdk.chain`, :func:`~airflow.sdk.chain_linear`, :func:`~airflow.sdk.cross_downstream`, +:func:`~airflow.sdk.get_current_context` and :func:`~airflow.sdk.get_parsing_context`. Airflow has a set of Decorators that are considered public. You are free to extend their functionality by extending them: -.. toctree:: - :includehidden: - :maxdepth: 1 - - _api/airflow/decorators/index +.. note:: + Decorators are now part of the airflow.sdk namespace.
+ For detailed API documentation, see the `Task SDK Reference `_. You can read more about creating custom Decorators in :doc:`howto/create-custom-decorator`. Email notifications ------------------- +=================== Airflow has a built-in way of sending email notifications and it allows you to extend it with custom email notification classes. You can read more about email notifications in :doc:`howto/email-config`. Notifications -------------- +============= Airflow has a built-in extensible way of sending notifications using the various ``on_*_callback``. You can read more about notifications in :doc:`howto/notifications`. Cluster Policies ----------------- +================ Cluster Policies are the way to dynamically apply cluster-wide policies to the dags being parsed or tasks being executed. You can read more about Cluster Policies in :doc:`administration-and-deployment/cluster-policies`. Lineage -------- +======= Airflow can help track origins of data, what happens to it and where it moves over time. You can read more about lineage in :doc:`administration-and-deployment/lineage`.
@@ -417,3 +547,49 @@ but in Airflow they are not parts of the Public Interface and might change any t * Python classes except those explicitly mentioned in this document, are considered an internal implementation detail and you should not assume they will be maintained in a backwards-compatible way. + +**Direct metadata database access from task code is no longer allowed**. Task code cannot directly access the metadata database to query DAG state, task history, or DAG runs. Instead, use one of the following alternatives: + +* **Task Context**: Use :func:`~airflow.sdk.get_current_context` to access task instance information and methods like :meth:`~airflow.sdk.types.RuntimeTaskInstanceProtocol.get_dr_count`, :meth:`~airflow.sdk.types.RuntimeTaskInstanceProtocol.get_dagrun_state`, and :meth:`~airflow.sdk.types.RuntimeTaskInstanceProtocol.get_task_states`. + +* **REST API**: Use the :doc:`Stable REST API ` for programmatic access to Airflow metadata. + +* **Python Client**: Use the `Python Client `_ for Python-based interactions with Airflow. + +This change improves architectural separation and enables remote execution capabilities. + +Example of using Task Context instead of direct database access: + +.. code-block:: python + + from airflow.sdk import dag, get_current_context, task + from datetime import datetime + + + @dag(dag_id="example_dag", start_date=datetime(2025, 1, 1), schedule="@hourly", tags=["misc"], catchup=False) + def example_dag(): + + @task(task_id="check_dagrun_state") + def check_state(): + context = get_current_context() + ti = context["ti"] + dag_run = context["dag_run"] + + # Use Task Context methods instead of direct DB access + dr_count = ti.get_dr_count(dag_id="example_dag") + dagrun_state = ti.get_dagrun_state(dag_id="example_dag", run_id=dag_run.run_id) + + return f"DAG run count: {dr_count}, current state: {dagrun_state}" + + check_state() + + + example_dag()
diff --git a/airflow-core/docs/tutorial/fundamentals.rst b/airflow-core/docs/tutorial/fundamentals.rst index 20c93e2737699..1cce983931500 100644 --- a/airflow-core/docs/tutorial/fundamentals.rst +++ b/airflow-core/docs/tutorial/fundamentals.rst
@@ -90,7 +90,7 @@ Next, we'll need to create a DAG object to house our tasks. We'll provide a uniq Understanding Operators ----------------------- An operator represents a unit of work in Airflow.
They are the building blocks of your workflows, allowing you to -define what tasks will be executed. While we can use operators for many tasks, Airflow also offers the :doc:`Taskflow API ` +define what tasks will be executed. While we can use operators for many tasks, Airflow also offers the :doc:`TaskFlow API ` for a more Pythonic way to define workflows, which we'll touch on later. All operators derive from the ``BaseOperator``, which includes the essential arguments needed to run tasks in Airflow. diff --git a/airflow-core/docs/tutorial/objectstorage.rst b/airflow-core/docs/tutorial/objectstorage.rst index 59c4142f7ce85..de80e8c973c16 100644 --- a/airflow-core/docs/tutorial/objectstorage.rst +++ b/airflow-core/docs/tutorial/objectstorage.rst @@ -23,7 +23,7 @@ Cloud-Native Workflows with Object Storage .. versionadded:: 2.8 -Welcome to the final tutorial in our Airflow series! By now, you've built DAGs with Python and the Taskflow API, passed +Welcome to the final tutorial in our Airflow series! By now, you've built DAGs with Python and the TaskFlow API, passed data with XComs, and chained tasks together into clear, reusable workflows. In this tutorial we'll take it a step further by introducing the **Object Storage API**. This API makes it easier to @@ -108,7 +108,7 @@ Here's what's happening: - We generate a filename based on the task's logical date - Using ``ObjectStoragePath``, we write the data directly to cloud storage as Parquet -This is a classic Taskflow pattern. The object key changes each day, allowing us to run this daily and build a dataset +This is a classic TaskFlow pattern. The object key changes each day, allowing us to run this daily and build a dataset over time. We return the final object path to be used in the next task. Why this is cool: No boto3, no GCS client setup, no credentials juggling. Just simple file semantics that work across diff --git a/airflow-core/newsfragments/52860.significant.rst b/airflow-core/newsfragments/52860.significant.rst new file mode 100644 index 0000000000000..5962897ec206d --- /dev/null +++ b/airflow-core/newsfragments/52860.significant.rst @@ -0,0 +1,17 @@ +Replace API server ``access_logfile`` configuration with ``log_config`` + +The API server configuration option ``[api] access_logfile`` has been replaced with ``[api] log_config`` to align with uvicorn's logging configuration instead of the legacy gunicorn approach. +The new ``log_config`` option accepts a path to a logging configuration file compatible with ``logging.config.fileConfig``, providing more flexible logging configuration for the API server. + +This change also removes the dependency on gunicorn for daemonization, making the API server ``--daemon`` option consistent with other Airflow components like scheduler and triggerer. 
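Because the file referenced by ``[api] log_config`` must be loadable by ``logging.config.fileConfig``, a quick way to sanity-check a candidate file before pointing the API server at it is the following hedged sketch (the path and logger name are illustrative):

.. code-block:: python

    import logging
    import logging.config

    # fileConfig raises if the file is not a valid logging configuration,
    # so loading it locally catches mistakes before the API server does.
    logging.config.fileConfig("/etc/airflow/api-log-config.ini", disable_existing_loggers=False)
    logging.getLogger("uvicorn.access").info("log configuration loaded OK")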
+ +* Types of change + + * [ ] Dag changes + * [x] Config changes + * [ ] API changes + * [ ] CLI changes + * [ ] Behaviour changes + * [ ] Plugin changes + * [ ] Dependency changes + * [ ] Code interface changes diff --git a/airflow-core/pyproject.toml b/airflow-core/pyproject.toml index d5d03e4021c02..3646ccabdea7b 100644 --- a/airflow-core/pyproject.toml +++ b/airflow-core/pyproject.toml @@ -22,7 +22,7 @@ requires = [ "hatchling==1.27.0", "packaging==25.0", "pathspec==0.12.1", - "pluggy==1.5.0", + "pluggy==1.6.0", "smmap==5.0.2", "tomli==2.2.1; python_version < '3.11'", "trove-classifiers==2025.5.9.12", @@ -35,7 +35,7 @@ name = "apache-airflow-core" description = "Core packages for Apache Airflow, schedule and API server" readme = { file = "README.md", content-type = "text/markdown" } license-files.globs = ["LICENSE", "3rd-party-licenses/*.txt", "NOTICE"] -requires-python = "~=3.10,<3.13" +requires-python = ">=3.10,!=3.13" authors = [ { name = "Apache Software Foundation", email = "dev@airflow.apache.org" }, ] @@ -80,10 +80,6 @@ dependencies = [ # 0.115.10 fastapi was a bad release that broke our API's and static checks. # Related fastapi issue here: https://github.com/fastapi/fastapi/discussions/13431 "fastapi[standard]>=0.115.0,!=0.115.10", - # We could get rid of flask and gunicorn if we replace serve_logs with a starlette + unicorn - "flask>=2.1.1", - # We could get rid of flask and gunicorn if we replace serve_logs with a starlette + unicorn - "gunicorn>=20.1.0", "httpx>=0.25.0", 'importlib_metadata>=6.5;python_version<"3.12"', 'importlib_metadata>=7.0;python_version>="3.12"', @@ -107,7 +103,7 @@ dependencies = [ # This may be removed when future versions of pip are able # to handle this dependency resolution automatically. "opentelemetry-proto<9999", - "packaging>=23.2", + "packaging>=25.0", "pathspec>=0.9.0", 'pendulum>=2.1.2,<4.0;python_version<"3.12"', 'pendulum>=3.0.0,<4.0;python_version>="3.12"', @@ -215,6 +211,7 @@ include = [ exclude = [ "src/airflow/ui/node_modules/", "src/airflow/api_fastapi/auth/managers/simple/ui/node_modules", + "src/airflow/ui/openapi.merged.json", ] [tool.hatch.build.targets.custom] @@ -237,6 +234,7 @@ artifacts = [ exclude = [ "src/airflow/ui/node_modules/", "src/airflow/api_fastapi/auth/managers/simple/ui/node_modules", + "src/airflow/ui/openapi.merged.json", ] [dependency-groups] diff --git a/airflow-core/src/airflow/api/common/mark_tasks.py b/airflow-core/src/airflow/api/common/mark_tasks.py index ad8c7fe4928cd..80b018006b448 100644 --- a/airflow-core/src/airflow/api/common/mark_tasks.py +++ b/airflow-core/src/airflow/api/common/mark_tasks.py @@ -34,7 +34,7 @@ from sqlalchemy.orm import Session as SASession from airflow.models.dag import DAG - from airflow.models.operator import Operator + from airflow.sdk.types import Operator @provide_session diff --git a/airflow-core/src/airflow/api/common/trigger_dag.py b/airflow-core/src/airflow/api/common/trigger_dag.py index a1406695e6808..186ce9499a70a 100644 --- a/airflow-core/src/airflow/api/common/trigger_dag.py +++ b/airflow-core/src/airflow/api/common/trigger_dag.py @@ -69,6 +69,7 @@ def _trigger_dag( raise DagNotFound(f"Dag id {dag_id} not found") run_after = run_after or timezone.coerce_datetime(timezone.utcnow()) + coerced_logical_date: datetime | None = None if logical_date: if not timezone.is_localized(logical_date): raise ValueError("The logical date should be localized") @@ -86,7 +87,6 @@ def _trigger_dag( coerced_logical_date = timezone.coerce_datetime(logical_date) data_interval = 
dag.timetable.infer_manual_data_interval(run_after=run_after) else: - coerced_logical_date = None data_interval = None run_id = run_id or DagRun.generate_run_id( diff --git a/airflow-core/src/airflow/api_fastapi/app.py b/airflow-core/src/airflow/api_fastapi/app.py index 36b59630d0b65..f515be6992c60 100644 --- a/airflow-core/src/airflow/api_fastapi/app.py +++ b/airflow-core/src/airflow/api_fastapi/app.py @@ -30,6 +30,7 @@ init_error_handlers, init_flask_plugins, init_middlewares, + init_ui_plugins, init_views, ) from airflow.api_fastapi.execution_api.app import create_task_execution_api_app @@ -93,6 +94,7 @@ def create_app(apps: str = "all") -> FastAPI: init_plugins(app) init_auth_manager(app) init_flask_plugins(app) + init_ui_plugins(app) init_views(app) # Core views need to be the last routes added - it has a catch all route init_error_handlers(app) init_middlewares(app) diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/base_auth_manager.py b/airflow-core/src/airflow/api_fastapi/auth/managers/base_auth_manager.py index 629d9862164d7..321bcb123aebe 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/base_auth_manager.py +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/base_auth_manager.py @@ -20,7 +20,7 @@ import logging from abc import ABCMeta, abstractmethod from functools import cache -from typing import TYPE_CHECKING, Any, Generic, TypeVar +from typing import TYPE_CHECKING, Any, Generic, Literal, TypeVar from jwt import InvalidTokenError from sqlalchemy import select @@ -36,7 +36,6 @@ from airflow.api_fastapi.common.types import ExtraMenuItem, MenuItem from airflow.configuration import conf from airflow.models import DagModel -from airflow.typing_compat import Literal from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.session import NEW_SESSION, provide_session @@ -47,10 +46,7 @@ from sqlalchemy.orm import Session from airflow.api_fastapi.auth.managers.models.batch_apis import ( - IsAuthorizedConnectionRequest, IsAuthorizedDagRequest, - IsAuthorizedPoolRequest, - IsAuthorizedVariableRequest, ) from airflow.api_fastapi.auth.managers.models.resource_details import ( AccessView, @@ -66,7 +62,10 @@ # This cannot be in the TYPE_CHECKING block since some providers import it globally. # TODO: Move this inside once all providers drop Airflow 2.x support. -ResourceMethod = Literal["GET", "POST", "PUT", "DELETE", "MENU"] +# List of methods (or actions) a user can do against a resource +ResourceMethod = Literal["GET", "POST", "PUT", "DELETE"] +# Extends ``ResourceMethod`` to include "MENU". The method "MENU" is only supported with specific resources (menu items) +ExtendedResourceMethod = Literal["GET", "POST", "PUT", "DELETE", "MENU"] log = logging.getLogger(__name__) T = TypeVar("T", bound=BaseUser) @@ -304,27 +303,6 @@ def filter_authorized_menu_items(self, menu_items: list[MenuItem], *, user: T) - :param user: the user """ - def batch_is_authorized_connection( - self, - requests: Sequence[IsAuthorizedConnectionRequest], - *, - user: T, - ) -> bool: - """ - Batch version of ``is_authorized_connection``. - - By default, calls individually the ``is_authorized_connection`` API on each item in the list of - requests, which can lead to some poor performance. It is recommended to override this method in the auth - manager implementation to provide a more efficient implementation. 
- - :param requests: a list of requests containing the parameters for ``is_authorized_connection`` - :param user: the user to performing the action - """ - return all( - self.is_authorized_connection(method=request["method"], details=request.get("details"), user=user) - for request in requests - ) - def batch_is_authorized_dag( self, requests: Sequence[IsAuthorizedDagRequest], @@ -351,48 +329,6 @@ def batch_is_authorized_dag( for request in requests ) - def batch_is_authorized_pool( - self, - requests: Sequence[IsAuthorizedPoolRequest], - *, - user: T, - ) -> bool: - """ - Batch version of ``is_authorized_pool``. - - By default, calls individually the ``is_authorized_pool`` API on each item in the list of - requests. Can lead to some poor performance. It is recommended to override this method in the auth - manager implementation to provide a more efficient implementation. - - :param requests: a list of requests containing the parameters for ``is_authorized_pool`` - :param user: the user to performing the action - """ - return all( - self.is_authorized_pool(method=request["method"], details=request.get("details"), user=user) - for request in requests - ) - - def batch_is_authorized_variable( - self, - requests: Sequence[IsAuthorizedVariableRequest], - *, - user: T, - ) -> bool: - """ - Batch version of ``is_authorized_variable``. - - By default, calls individually the ``is_authorized_variable`` API on each item in the list of - requests. Can lead to some poor performance. It is recommended to override this method in the auth - manager implementation to provide a more efficient implementation. - - :param requests: a list of requests containing the parameters for ``is_authorized_variable`` - :param user: the user to performing the action - """ - return all( - self.is_authorized_variable(method=request["method"], details=request.get("details"), user=user) - for request in requests - ) - @provide_session def get_authorized_dag_ids( self, diff --git a/airflow-core/src/airflow/api_fastapi/auth/managers/models/batch_apis.py b/airflow-core/src/airflow/api_fastapi/auth/managers/models/batch_apis.py index 2fe11b659af6e..5acdd3edee5f2 100644 --- a/airflow-core/src/airflow/api_fastapi/auth/managers/models/batch_apis.py +++ b/airflow-core/src/airflow/api_fastapi/auth/managers/models/batch_apis.py @@ -22,38 +22,14 @@ if TYPE_CHECKING: from airflow.api_fastapi.auth.managers.base_auth_manager import ResourceMethod from airflow.api_fastapi.auth.managers.models.resource_details import ( - ConnectionDetails, DagAccessEntity, DagDetails, - PoolDetails, - VariableDetails, ) -class IsAuthorizedConnectionRequest(TypedDict, total=False): - """Represent the parameters of ``is_authorized_connection`` API in the auth manager.""" - - method: ResourceMethod - details: ConnectionDetails | None - - class IsAuthorizedDagRequest(TypedDict, total=False): """Represent the parameters of ``is_authorized_dag`` API in the auth manager.""" method: ResourceMethod access_entity: DagAccessEntity | None details: DagDetails | None - - -class IsAuthorizedPoolRequest(TypedDict, total=False): - """Represent the parameters of ``is_authorized_pool`` API in the auth manager.""" - - method: ResourceMethod - details: PoolDetails | None - - -class IsAuthorizedVariableRequest(TypedDict, total=False): - """Represent the parameters of ``is_authorized_variable`` API in the auth manager.""" - - method: ResourceMethod - details: VariableDetails | None diff --git a/airflow-core/src/airflow/api_fastapi/common/db/dags.py 
b/airflow-core/src/airflow/api_fastapi/common/db/dags.py index cdc4bf91be9da..36283f86c55eb 100644 --- a/airflow-core/src/airflow/api_fastapi/common/db/dags.py +++ b/airflow-core/src/airflow/api_fastapi/common/db/dags.py @@ -19,7 +19,7 @@ from typing import TYPE_CHECKING -from sqlalchemy import func, null, select +from sqlalchemy import func, select from airflow.api_fastapi.common.db.common import ( apply_filters_to_select, @@ -37,7 +37,6 @@ def generate_dag_with_latest_run_query(max_run_filters: list[BaseParam], order_b max_run_id_query = ( # ordering by id will not always be "latest run", but it's a simplifying assumption select(DagRun.dag_id, func.max(DagRun.id).label("max_dag_run_id")) - .where(DagRun.start_date.is_not(null())) .group_by(DagRun.dag_id) .subquery(name="mrq") ) diff --git a/airflow-core/src/airflow/api_fastapi/common/parameters.py b/airflow-core/src/airflow/api_fastapi/common/parameters.py index 14ab5efa8ef4d..4cf35d68871ba 100644 --- a/airflow-core/src/airflow/api_fastapi/common/parameters.py +++ b/airflow-core/src/airflow/api_fastapi/common/parameters.py @@ -34,10 +34,11 @@ from fastapi import Depends, HTTPException, Query, status from pendulum.parsing.exceptions import ParserError from pydantic import AfterValidator, BaseModel, NonNegativeInt -from sqlalchemy import Column, and_, case, func, or_ +from sqlalchemy import Column, and_, case, func, not_, or_, select from sqlalchemy.inspection import inspect from airflow.api_fastapi.core_api.base import OrmClause +from airflow.api_fastapi.core_api.security import GetUserDep from airflow.models import Base from airflow.models.asset import ( AssetAliasModel, @@ -48,6 +49,7 @@ ) from airflow.models.connection import Connection from airflow.models.dag import DagModel, DagTag +from airflow.models.dag_favorite import DagFavorite from airflow.models.dag_version import DagVersion from airflow.models.dagrun import DagRun from airflow.models.pool import Pool @@ -109,6 +111,37 @@ def depends(cls, offset: NonNegativeInt = 0) -> OffsetFilter: return cls().set_value(offset) +class _FavoriteFilter(BaseParam[bool]): + """Filter DAGs by favorite status.""" + + user_id: str + + def to_orm(self, select_stmt: Select) -> Select: + if self.value is None and self.skip_none: + return select_stmt + + if self.value: + select_stmt = select_stmt.join(DagFavorite, DagFavorite.dag_id == DagModel.dag_id).where( + DagFavorite.user_id == self.user_id + ) + else: + select_stmt = select_stmt.where( + not_( + select(DagFavorite) + .where(and_(DagFavorite.dag_id == DagModel.dag_id, DagFavorite.user_id == self.user_id)) + .exists() + ) + ) + + return select_stmt + + @classmethod + def depends(cls, user: GetUserDep, is_favorite: bool | None = Query(None)) -> _FavoriteFilter: + instance = cls().set_value(is_favorite) + instance.user_id = str(user.get_id()) + return instance + + class _ExcludeStaleFilter(BaseParam[bool]): """Filter on is_stale.""" @@ -526,6 +559,7 @@ def depends_float( FilterParam[bool | None], Depends(filter_param_factory(DagModel.is_paused, bool | None, filter_name="paused")), ] +QueryFavoriteFilter = Annotated[_FavoriteFilter, Depends(_FavoriteFilter.depends)] QueryExcludeStaleFilter = Annotated[_ExcludeStaleFilter, Depends(_ExcludeStaleFilter.depends)] QueryDagIdPatternSearch = Annotated[ _SearchParam, Depends(search_param_factory(DagModel.dag_id, "dag_id_pattern")) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/app.py b/airflow-core/src/airflow/api_fastapi/core_api/app.py index ad8938de994b6..bafa3add823c8 100644 --- 
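For readers tracing the new `is_favorite` query parameter: `_FavoriteFilter.to_orm` compiles to an inner join when filtering for favorites and to an anti-join (`NOT EXISTS`) when filtering them out. A standalone sketch of the two statement shapes, using the models introduced in this diff:

```python
# Equivalent statements to what _FavoriteFilter emits, per its to_orm above.
from sqlalchemy import and_, not_, select

from airflow.models.dag import DagModel
from airflow.models.dag_favorite import DagFavorite


def only_favorites(stmt, user_id: str):
    # is_favorite=True: restrict to DAGs this user has favorited
    return stmt.join(DagFavorite, DagFavorite.dag_id == DagModel.dag_id).where(
        DagFavorite.user_id == user_id
    )


def exclude_favorites(stmt, user_id: str):
    # is_favorite=False: anti-join, i.e. NOT EXISTS a favorite row for this user
    return stmt.where(
        not_(
            select(DagFavorite)
            .where(and_(DagFavorite.dag_id == DagModel.dag_id, DagFavorite.user_id == user_id))
            .exists()
        )
    )
```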
a/airflow-core/src/airflow/api_fastapi/core_api/app.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/app.py @@ -181,3 +181,10 @@ def init_middlewares(app: FastAPI) -> None: from airflow.api_fastapi.auth.managers.simple.middleware import SimpleAllAdminMiddleware app.add_middleware(SimpleAllAdminMiddleware) + + +def init_ui_plugins(app: FastAPI) -> None: + """Initialize UI plugins.""" + from airflow import plugins_manager + + plugins_manager.initialize_ui_plugins() diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py new file mode 100644 index 0000000000000..88ad702316423 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/hitl.py @@ -0,0 +1,77 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from collections.abc import Mapping +from datetime import datetime +from typing import Any + +from pydantic import Field, field_validator + +from airflow.api_fastapi.core_api.base import BaseModel +from airflow.sdk import Param + + +class UpdateHITLDetailPayload(BaseModel): + """Schema for updating the content of a Human-in-the-loop detail.""" + + chosen_options: list[str] + params_input: Mapping = Field(default_factory=dict) + + +class HITLDetailResponse(BaseModel): + """Response of updating a Human-in-the-loop detail.""" + + user_id: str + response_at: datetime + chosen_options: list[str] + params_input: Mapping = Field(default_factory=dict) + + +class HITLDetail(BaseModel): + """Schema for Human-in-the-loop detail.""" + + ti_id: str + + # User Request Detail + options: list[str] + subject: str + body: str | None = None + defaults: list[str] | None = None + multiple: bool = False + params: dict[str, Any] = Field(default_factory=dict) + + # Response Content Detail + user_id: str | None = None + response_at: datetime | None = None + chosen_options: list[str] | None = None + params_input: dict[str, Any] = Field(default_factory=dict) + + response_received: bool = False + + @field_validator("params", mode="before") + @classmethod + def get_params(cls, params: dict[str, Any]) -> dict[str, Any]: + """Convert params attribute to dict representation.""" + return {k: v.dump() if isinstance(v, Param) else v for k, v in params.items()} + + +class HITLDetailCollection(BaseModel): + """Schema for a collection of Human-in-the-loop details.""" + + hitl_details: list[HITLDetail] + total_entries: int diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/task_instances.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/task_instances.py index 392fcc598df71..e2470119a1c80 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/task_instances.py +++ 
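The `params` validator on `HITLDetail` normalizes `Param` objects into plain dicts before Pydantic validation. A small illustration of that pre-processing, assuming the `airflow.sdk.Param` class imported in the new datamodel above:

```python
from airflow.sdk import Param

params = {"threshold": Param(0.5, type="number"), "note": "plain value"}
# Mirrors HITLDetail.get_params: Param instances are dumped to their dict
# representation, while other values pass through unchanged.
dumped = {k: v.dump() if isinstance(v, Param) else v for k, v in params.items()}
```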
b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/task_instances.py @@ -44,6 +44,7 @@ class TaskInstanceResponse(BaseModel): id: str task_id: str dag_id: str + dag_version: DagVersionResponse run_id: str = Field(alias="dag_run_id") map_index: int logical_date: datetime | None @@ -76,7 +77,6 @@ class TaskInstanceResponse(BaseModel): ) trigger: TriggerResponse | None queued_by_job: JobResponse | None = Field(alias="triggerer_job") - dag_version: DagVersionResponse | None class TaskInstanceCollectionResponse(BaseModel): diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/calendar.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/calendar.py new file mode 100644 index 0000000000000..c3cc5a53d8cd1 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/calendar.py @@ -0,0 +1,45 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from datetime import datetime +from typing import Literal + +from pydantic import BaseModel + +from airflow.utils.state import DagRunState + + +class CalendarTimeRangeResponse(BaseModel): + """Represents a summary of DAG runs for a specific calendar time range.""" + + date: datetime + state: Literal[ + DagRunState.QUEUED, + DagRunState.RUNNING, + DagRunState.SUCCESS, + DagRunState.FAILED, + "planned", + ] + count: int + + +class CalendarTimeRangeCollectionResponse(BaseModel): + """Response model for calendar time range results.""" + + total_entries: int + dag_runs: list[CalendarTimeRangeResponse] diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/common.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/common.py index 0f315326194e5..6089d6d55dfc4 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/common.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/common.py @@ -23,6 +23,7 @@ from pydantic import computed_field from airflow.api_fastapi.core_api.base import BaseModel +from airflow.utils import timezone from airflow.utils.state import TaskInstanceState from airflow.utils.types import DagRunType @@ -80,10 +81,11 @@ class GridRunsResponse(BaseModel): run_type: DagRunType @computed_field - def duration(self) -> int | None: - if self.start_date and self.end_date: - return (self.end_date - self.start_date).seconds - return None + def duration(self) -> int: + if self.start_date: + end_date = self.end_date or timezone.utcnow() + return (end_date - self.start_date).seconds + return 0 class BaseGraphResponse(BaseModel, Generic[E, N]): diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/grid.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/grid.py index e518963781856..b523dce96ffaa 100644 --- 
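A quick illustration of the reworked `duration` field on `GridRunsResponse`: a run that has started but not yet finished now reports elapsed time against the current clock instead of `None`. A minimal standard-library sketch (`datetime.now(timezone.utc)` stands in for Airflow's `timezone.utcnow` helper used in the diff):

```python
from datetime import datetime, timedelta, timezone


def duration(start_date, end_date) -> int:
    if start_date:
        end = end_date or datetime.now(timezone.utc)  # stand-in for timezone.utcnow()
        # As in the diff, this uses timedelta.seconds, which wraps at day
        # boundaries; total_seconds() would also include whole days.
        return (end - start_date).seconds
    return 0


start = datetime(2025, 1, 1, 12, 0, tzinfo=timezone.utc)
assert duration(start, start + timedelta(minutes=5)) == 300
assert duration(None, None) == 0
```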
a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/grid.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/ui/grid.py @@ -19,24 +19,9 @@ from datetime import datetime -from pydantic import BaseModel, Field +from pydantic import BaseModel -from airflow.utils.state import DagRunState, TaskInstanceState -from airflow.utils.types import DagRunType - - -class GridTaskInstanceSummary(BaseModel): - """Task Instance Summary model for the Grid UI.""" - - task_id: str - try_number: int - start_date: datetime | None - end_date: datetime | None - queued_dttm: datetime | None - child_states: dict[str, int] | None - task_count: int - state: TaskInstanceState | None - note: str | None +from airflow.utils.state import TaskInstanceState class LightGridTaskInstanceSummary(BaseModel): @@ -44,37 +29,14 @@ class LightGridTaskInstanceSummary(BaseModel): task_id: str state: TaskInstanceState | None - child_states: dict[TaskInstanceState, int] | None + child_states: dict[TaskInstanceState | None, int] | None min_start_date: datetime | None max_end_date: datetime | None -class GridDAGRunwithTIs(BaseModel): - """DAG Run model for the Grid UI.""" - - run_id: str = Field(serialization_alias="dag_run_id", validation_alias="run_id") - queued_at: datetime | None - start_date: datetime | None - end_date: datetime | None - run_after: datetime - state: DagRunState - run_type: DagRunType - logical_date: datetime | None - data_interval_start: datetime | None - data_interval_end: datetime | None - note: str | None - task_instances: list[GridTaskInstanceSummary] - - class GridTISummaries(BaseModel): """DAG Run model for the Grid UI.""" run_id: str dag_id: str task_instances: list[LightGridTaskInstanceSummary] - - -class GridResponse(BaseModel): - """Response model for the Grid UI.""" - - dag_runs: list[GridDAGRunwithTIs] diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml index 8b7bded786643..450cdc3036e28 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/_private_ui.yaml @@ -218,6 +218,14 @@ paths: type: string default: dag_id title: Order By + - name: is_favorite + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Is Favorite responses: '200': description: Successful Response @@ -466,144 +474,6 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /ui/grid/{dag_id}: - get: - tags: - - Grid - summary: Grid Data - description: Return grid data. 
- operationId: grid_data - security: - - OAuth2PasswordBearer: [] - parameters: - - name: dag_id - in: path - required: true - schema: - type: string - title: Dag Id - - name: include_upstream - in: query - required: false - schema: - type: boolean - default: false - title: Include Upstream - - name: include_downstream - in: query - required: false - schema: - type: boolean - default: false - title: Include Downstream - - name: root - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Root - - name: offset - in: query - required: false - schema: - type: integer - minimum: 0 - default: 0 - title: Offset - - name: run_type - in: query - required: false - schema: - type: array - items: - type: string - title: Run Type - - name: state - in: query - required: false - schema: - type: array - items: - type: string - title: State - - name: limit - in: query - required: false - schema: - type: integer - minimum: 0 - default: 50 - title: Limit - - name: order_by - in: query - required: false - schema: - type: string - default: id - title: Order By - - name: run_after_gte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Run After Gte - - name: run_after_lte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Run After Lte - - name: logical_date_gte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date Gte - - name: logical_date_lte - in: query - required: false - schema: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date Lte - responses: - '200': - description: Successful Response - content: - application/json: - schema: - $ref: '#/components/schemas/GridResponse' - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found - '422': - description: Validation Error - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' /ui/grid/structure/{dag_id}: get: tags: @@ -891,6 +761,64 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /ui/calendar/{dag_id}: + get: + tags: + - Calendar + summary: Get Calendar + description: Get calendar data for a DAG including historical and planned DAG + runs. 
+ operationId: get_calendar + security: + - OAuth2PasswordBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: granularity + in: query + required: false + schema: + enum: + - hourly + - daily + type: string + default: daily + title: Granularity + - name: logical_date_gte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Gte + - name: logical_date_lte + in: query + required: false + schema: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date Lte + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CalendarTimeRangeCollectionResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' components: schemas: BackfillCollectionResponse: @@ -1032,6 +960,47 @@ components: - type title: BaseNodeResponse description: Base Node serializer for responses. + CalendarTimeRangeCollectionResponse: + properties: + total_entries: + type: integer + title: Total Entries + dag_runs: + items: + $ref: '#/components/schemas/CalendarTimeRangeResponse' + type: array + title: Dag Runs + type: object + required: + - total_entries + - dag_runs + title: CalendarTimeRangeCollectionResponse + description: Response model for calendar time range results. + CalendarTimeRangeResponse: + properties: + date: + type: string + format: date-time + title: Date + state: + type: string + enum: + - queued + - running + - success + - failed + - planned + title: State + count: + type: integer + title: Count + type: object + required: + - date + - state + - count + title: CalendarTimeRangeResponse + description: Represents a summary of DAG runs for a specific calendar time range. ConfigResponse: properties: page_size: @@ -1687,81 +1656,6 @@ components: - text - href title: ExtraMenuItem - GridDAGRunwithTIs: - properties: - dag_run_id: - type: string - title: Dag Run Id - queued_at: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Queued At - start_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date - end_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date - run_after: - type: string - format: date-time - title: Run After - state: - $ref: '#/components/schemas/DagRunState' - run_type: - $ref: '#/components/schemas/DagRunType' - logical_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Logical Date - data_interval_start: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Data Interval Start - data_interval_end: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Data Interval End - note: - anyOf: - - type: string - - type: 'null' - title: Note - task_instances: - items: - $ref: '#/components/schemas/GridTaskInstanceSummary' - type: array - title: Task Instances - type: object - required: - - dag_run_id - - queued_at - - start_date - - end_date - - run_after - - state - - run_type - - logical_date - - data_interval_start - - data_interval_end - - note - - task_instances - title: GridDAGRunwithTIs - description: DAG Run model for the Grid UI. GridNodeResponse: properties: id: @@ -1797,18 +1691,6 @@ components: - is_mapped title: GridNodeResponse description: Base Node serializer for responses. 
- GridResponse: - properties: - dag_runs: - items: - $ref: '#/components/schemas/GridDAGRunwithTIs' - type: array - title: Dag Runs - type: object - required: - - dag_runs - title: GridResponse - description: Response model for the Grid UI. GridRunsResponse: properties: dag_id: @@ -1846,9 +1728,7 @@ components: run_type: $ref: '#/components/schemas/DagRunType' duration: - anyOf: - - type: integer - - type: 'null' + type: integer title: Duration readOnly: true type: object @@ -1884,64 +1764,6 @@ components: - task_instances title: GridTISummaries description: DAG Run model for the Grid UI. - GridTaskInstanceSummary: - properties: - task_id: - type: string - title: Task Id - try_number: - type: integer - title: Try Number - start_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Start Date - end_date: - anyOf: - - type: string - format: date-time - - type: 'null' - title: End Date - queued_dttm: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Queued Dttm - child_states: - anyOf: - - additionalProperties: - type: integer - type: object - - type: 'null' - title: Child States - task_count: - type: integer - title: Task Count - state: - anyOf: - - $ref: '#/components/schemas/TaskInstanceState' - - type: 'null' - note: - anyOf: - - type: string - - type: 'null' - title: Note - type: object - required: - - task_id - - try_number - - start_date - - end_date - - queued_dttm - - child_states - - task_count - - state - - note - title: GridTaskInstanceSummary - description: Task Instance Summary model for the Grid UI. HTTPExceptionResponse: properties: detail: @@ -2015,8 +1837,6 @@ components: anyOf: - additionalProperties: type: integer - propertyNames: - $ref: '#/components/schemas/TaskInstanceState' type: object - type: 'null' title: Child States diff --git a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml index 9a47eba0c10a2..ae9a645fc2e0f 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml +++ b/airflow-core/src/airflow/api_fastapi/core_api/openapi/v2-rest-api-generated.yaml @@ -2041,6 +2041,18 @@ paths: type: string default: id title: Order By + - name: run_id_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." + title: Run Id Pattern + description: "SQL LIKE expression \u2014 use `%` / `_` wildcards (e.g. `%customer_%`).\ + \ Regular expressions are **not** supported." responses: '200': description: Successful Response @@ -2135,6 +2147,90 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/wait: + get: + tags: + - DagRun + - experimental + summary: 'Experimental: Wait for a dag run to complete, and return task results + if requested.' + description: "\U0001F6A7 This is an experimental endpoint and may change or\ + \ be removed without notice." 
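The new `run_id_pattern` parameter added to the dag-run list endpoint above is an SQL `LIKE` expression, not a regex. A hypothetical client call (host, token, and DAG id are placeholders, not values from this diff):

```python
import requests

resp = requests.get(
    "http://localhost:8080/api/v2/dags/example_dag/dagRuns",
    params={"run_id_pattern": "%customer_%", "limit": 10},  # % and _ are wildcards
    headers={"Authorization": "Bearer TOKEN"},
)
resp.raise_for_status()
print(resp.json()["total_entries"])
```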
+ operationId: wait_dag_run_until_finished + security: + - OAuth2PasswordBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: interval + in: query + required: true + schema: + type: number + exclusiveMinimum: 0.0 + description: Seconds to wait between dag run state checks + title: Interval + description: Seconds to wait between dag run state checks + - name: result + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + description: Collect result XCom from task. Can be set multiple times. + title: Result + description: Collect result XCom from task. Can be set multiple times. + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + application/x-ndjson: + schema: + type: string + example: '{"state": "running"} + + {"state": "success", "results": {"op": 42}} + + ' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /api/v2/dags/{dag_id}/dagRuns/list: post: tags: @@ -2758,6 +2854,14 @@ paths: type: string default: dag_id title: Order By + - name: is_favorite + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Is Favorite responses: '200': description: Successful Response @@ -3140,6 +3244,98 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/favorite: + post: + tags: + - DAG + summary: Favorite Dag + description: Mark the DAG as favorite. + operationId: favorite_dag + security: + - OAuth2PasswordBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/dags/{dag_id}/unfavorite: + post: + tags: + - DAG + summary: Unfavorite Dag + description: Unmark the DAG as favorite. 
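The experimental `/wait` endpoint specified above streams NDJSON, one status object per poll, with an optional final `results` map of collected XComs. A hypothetical consumer; the URL, run id, and token are placeholders:

```python
import json

import requests

url = "http://localhost:8080/api/v2/dags/example_dag/dagRuns/manual__2025-01-01/wait"
with requests.get(
    url,
    params={"interval": 5, "result": ["op"]},  # repeated ?result= collects task XComs
    headers={"Authorization": "Bearer TOKEN"},
    stream=True,
) as resp:
    for line in resp.iter_lines():
        if line:
            event = json.loads(line)
            print(event["state"], event.get("results"))
```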
+ operationId: unfavorite_dag + security: + - OAuth2PasswordBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' /api/v2/eventLogs/{event_log_id}: get: tags: @@ -6508,7 +6704,7 @@ paths: required: true schema: type: integer - exclusiveMinimum: 0 + minimum: 0 title: Try Number - name: full_content in: query @@ -6926,6 +7122,304 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}: + patch: + tags: + - HumanInTheLoop + summary: Update Hitl Detail + description: Update a Human-in-the-loop detail. + operationId: update_hitl_detail + security: + - OAuth2PasswordBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateHITLDetailPayload' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetailResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - HumanInTheLoop + summary: Get Hitl Detail + description: Get a Human-in-the-loop detail of a specific task instance. 
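Hypothetical calls against the new favorite endpoints (placeholders throughout). Both return 204 with no body, and unfavoriting a DAG that is not currently favorited yields the 409 documented above:

```python
import requests

base = "http://localhost:8080/api/v2/dags/example_dag"
headers = {"Authorization": "Bearer TOKEN"}

assert requests.post(f"{base}/favorite", headers=headers).status_code == 204
assert requests.post(f"{base}/unfavorite", headers=headers).status_code == 204
# A second unfavorite now returns 409 Conflict.
```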
+ operationId: get_hitl_detail + security: + - OAuth2PasswordBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetail' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}/{map_index}: + patch: + tags: + - HumanInTheLoop + summary: Update Mapped Ti Hitl Detail + description: Update a Human-in-the-loop detail. + operationId: update_mapped_ti_hitl_detail + security: + - OAuth2PasswordBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateHITLDetailPayload' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetailResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - HumanInTheLoop + summary: Get Mapped Ti Hitl Detail + description: Get a Human-in-the-loop detail of a specific task instance. 
+ operationId: get_mapped_ti_hitl_detail + security: + - OAuth2PasswordBearer: [] + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetail' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /api/v2/hitl-details/: + get: + tags: + - HumanInTheLoop + summary: Get Hitl Details + description: Get Human-in-the-loop details. + operationId: get_hitl_details + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HITLDetailCollection' + '401': + description: Unauthorized + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + '403': + description: Forbidden + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + security: + - OAuth2PasswordBearer: [] /api/v2/monitor/health: get: tags: @@ -9395,6 +9889,113 @@ components: - name title: FastAPIRootMiddlewareResponse description: Serializer for Plugin FastAPI root middleware responses. + HITLDetail: + properties: + ti_id: + type: string + title: Ti Id + options: + items: + type: string + type: array + title: Options + subject: + type: string + title: Subject + body: + anyOf: + - type: string + - type: 'null' + title: Body + defaults: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Defaults + multiple: + type: boolean + title: Multiple + default: false + params: + additionalProperties: true + type: object + title: Params + user_id: + anyOf: + - type: string + - type: 'null' + title: User Id + response_at: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Response At + chosen_options: + anyOf: + - items: + type: string + type: array + - type: 'null' + title: Chosen Options + params_input: + additionalProperties: true + type: object + title: Params Input + response_received: + type: boolean + title: Response Received + default: false + type: object + required: + - ti_id + - options + - subject + title: HITLDetail + description: Schema for Human-in-the-loop detail. + HITLDetailCollection: + properties: + hitl_details: + items: + $ref: '#/components/schemas/HITLDetail' + type: array + title: Hitl Details + total_entries: + type: integer + title: Total Entries + type: object + required: + - hitl_details + - total_entries + title: HITLDetailCollection + description: Schema for a collection of Human-in-the-loop details. 
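Putting the HITL endpoints together: a hypothetical response to a waiting Human-in-the-loop task. The payload follows `UpdateHITLDetailPayload`, and a second PATCH is rejected with 409 because a response can only be recorded once; host, token, and identifiers are placeholders:

```python
import requests

url = "http://localhost:8080/api/v2/hitl-details/example_dag/manual__2025-01-01/approval_task"
payload = {"chosen_options": ["approve"], "params_input": {"comment": "LGTM"}}
resp = requests.patch(url, json=payload, headers={"Authorization": "Bearer TOKEN"})
resp.raise_for_status()
detail = resp.json()  # HITLDetailResponse: user_id, response_at, chosen_options, params_input
print(detail["user_id"], detail["chosen_options"])
```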
+ HITLDetailResponse: + properties: + user_id: + type: string + title: User Id + response_at: + type: string + format: date-time + title: Response At + chosen_options: + items: + type: string + type: array + title: Chosen Options + params_input: + additionalProperties: true + type: object + title: Params Input + type: object + required: + - user_id + - response_at + - chosen_options + title: HITLDetailResponse + description: Response of updating a Human-in-the-loop detail. HTTPExceptionResponse: properties: detail: @@ -10277,6 +10878,8 @@ components: dag_id: type: string title: Dag Id + dag_version: + $ref: '#/components/schemas/DagVersionResponse' dag_run_id: type: string title: Dag Run Id @@ -10404,15 +11007,12 @@ components: anyOf: - $ref: '#/components/schemas/JobResponse' - type: 'null' - dag_version: - anyOf: - - $ref: '#/components/schemas/DagVersionResponse' - - type: 'null' type: object required: - id - task_id - dag_id + - dag_version - dag_run_id - map_index - logical_date @@ -10441,7 +11041,6 @@ components: - rendered_map_index - trigger - triggerer_job - - dag_version title: TaskInstanceResponse description: TaskInstance serializer for responses. TaskInstanceState: @@ -10926,6 +11525,22 @@ components: - latest_triggerer_heartbeat title: TriggererInfoResponse description: Triggerer info serializer for responses. + UpdateHITLDetailPayload: + properties: + chosen_options: + items: + type: string + type: array + title: Chosen Options + params_input: + additionalProperties: true + type: object + title: Params Input + type: object + required: + - chosen_options + title: UpdateHITLDetailPayload + description: Schema for updating the content of a Human-in-the-loop detail. ValidationError: properties: loc: diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py index fbbfb46dfa8d0..6db86ce2327a6 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/__init__.py @@ -37,6 +37,7 @@ from airflow.api_fastapi.core_api.routes.public.dags import dags_router from airflow.api_fastapi.core_api.routes.public.event_logs import event_logs_router from airflow.api_fastapi.core_api.routes.public.extra_links import extra_links_router +from airflow.api_fastapi.core_api.routes.public.hitl import hitl_router from airflow.api_fastapi.core_api.routes.public.import_error import import_error_router from airflow.api_fastapi.core_api.routes.public.job import job_router from airflow.api_fastapi.core_api.routes.public.log import task_instances_log_router @@ -83,6 +84,7 @@ authenticated_router.include_router(dag_parsing_router) authenticated_router.include_router(dag_tags_router) authenticated_router.include_router(dag_versions_router) +authenticated_router.include_router(hitl_router) # Include authenticated router in public router diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_run.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_run.py index f50b92cb27b2c..5d974ec012cd5 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_run.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dag_run.py @@ -17,11 +17,13 @@ from __future__ import annotations +import textwrap from typing import Annotated, Literal, cast import structlog from fastapi import Depends, HTTPException, Query, status from fastapi.exceptions import RequestValidationError 
+from fastapi.responses import StreamingResponse from pydantic import ValidationError from sqlalchemy import select from sqlalchemy.orm import joinedload @@ -46,9 +48,12 @@ Range, RangeFilter, SortParam, + _SearchParam, datetime_range_filter_factory, + search_param_factory, ) from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.common.types import Mimetype from airflow.api_fastapi.core_api.datamodels.assets import AssetEventCollectionResponse from airflow.api_fastapi.core_api.datamodels.dag_run import ( DAGRunClearBody, @@ -70,6 +75,7 @@ requires_access_asset, requires_access_dag, ) +from airflow.api_fastapi.core_api.services.public.dag_run import DagRunWaiter from airflow.api_fastapi.logging.decorators import action_logging from airflow.exceptions import ParamValidationError from airflow.listeners.listener import get_listener_manager @@ -334,6 +340,7 @@ def get_dag_runs( readable_dag_runs_filter: ReadableDagRunsFilterDep, session: SessionDep, dag_bag: DagBagDep, + run_id_pattern: Annotated[_SearchParam, Depends(search_param_factory(DagRun.run_id, "run_id_pattern"))], ) -> DAGRunCollectionResponse: """ Get all DAG Runs. @@ -360,6 +367,7 @@ def get_dag_runs( state, run_type, readable_dag_runs_filter, + run_id_pattern, ], order_by=order_by, offset=offset, @@ -422,6 +430,7 @@ def trigger_dag_run( state=DagRunState.QUEUED, session=session, ) + dag_run_note = body.note if dag_run_note: current_user_id = user.get_id() @@ -433,6 +442,57 @@ def trigger_dag_run( raise HTTPException(status.HTTP_400_BAD_REQUEST, str(e)) +@dag_run_router.get( + "/{dag_run_id}/wait", + tags=["experimental"], + summary="Experimental: Wait for a dag run to complete, and return task results if requested.", + description="🚧 This is an experimental endpoint and may change or be removed without notice.", + responses={ + **create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + status.HTTP_200_OK: { + "description": "Successful Response", + "content": { + Mimetype.NDJSON: { + "schema": { + "type": "string", + "example": textwrap.dedent( + """\ + {"state": "running"} + {"state": "success", "results": {"op": 42}} + """ + ), + } + } + }, + }, + }, + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.RUN))], +) +def wait_dag_run_until_finished( + dag_id: str, + dag_run_id: str, + session: SessionDep, + interval: Annotated[float, Query(gt=0.0, description="Seconds to wait between dag run state checks")], + result_task_ids: Annotated[ + list[str] | None, + Query(alias="result", description="Collect result XCom from task. Can be set multiple times."), + ] = None, +): + "Wait for a dag run until it finishes, and return its result(s)." 
+ if not session.scalar(select(1).where(DagRun.dag_id == dag_id, DagRun.run_id == dag_run_id)): + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found", + ) + waiter = DagRunWaiter( + dag_id=dag_id, + run_id=dag_run_id, + interval=interval, + result_task_ids=result_task_ids, + ) + return StreamingResponse(waiter.wait()) + + @dag_run_router.post( "/list", responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dags.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dags.py index 0cc1893543fd7..d418ca0e9f582 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dags.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/dags.py @@ -22,7 +22,7 @@ from fastapi import Depends, HTTPException, Query, Response, status from fastapi.exceptions import RequestValidationError from pydantic import ValidationError -from sqlalchemy import select, update +from sqlalchemy import delete, insert, select, update from airflow.api.common import delete_dag as delete_dag_module from airflow.api_fastapi.common.dagbag import DagBagDep @@ -38,6 +38,7 @@ QueryDagIdPatternSearch, QueryDagIdPatternSearchWithNone, QueryExcludeStaleFilter, + QueryFavoriteFilter, QueryLastDagRunStateFilter, QueryLimit, QueryOffset, @@ -60,12 +61,14 @@ from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc from airflow.api_fastapi.core_api.security import ( EditableDagsFilterDep, + GetUserDep, ReadableDagsFilterDep, requires_access_dag, ) from airflow.api_fastapi.logging.decorators import action_logging from airflow.exceptions import AirflowException, DagNotFound from airflow.models import DAG, DagModel +from airflow.models.dag_favorite import DagFavorite from airflow.models.dagrun import DagRun dags_router = AirflowRouter(tags=["DAG"], prefix="/dags") @@ -113,6 +116,7 @@ def get_dags( ], readable_dags_filter: ReadableDagsFilterDep, session: SessionDep, + is_favorite: QueryFavoriteFilter, ) -> DAGCollectionResponse: """Get all DAGs.""" query = generate_dag_with_latest_run_query( @@ -133,6 +137,7 @@ def get_dags( dag_id_pattern, dag_display_name_pattern, tags, + is_favorite, owners, readable_dags_filter, ], @@ -317,6 +322,54 @@ def patch_dags( ) +@dags_router.post( + "/{dag_id}/favorite", + status_code=status.HTTP_204_NO_CONTENT, + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + dependencies=[Depends(requires_access_dag(method="GET")), Depends(action_logging())], +) +def favorite_dag(dag_id: str, session: SessionDep, user: GetUserDep): + """Mark the DAG as favorite.""" + dag = session.get(DagModel, dag_id) + if not dag: + raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"DAG with id '{dag_id}' not found") + + user_id = str(user.get_id()) + session.execute(insert(DagFavorite).values(dag_id=dag_id, user_id=user_id)) + + +@dags_router.post( + "/{dag_id}/unfavorite", + status_code=status.HTTP_204_NO_CONTENT, + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND, status.HTTP_409_CONFLICT]), + dependencies=[Depends(requires_access_dag(method="GET")), Depends(action_logging())], +) +def unfavorite_dag(dag_id: str, session: SessionDep, user: GetUserDep): + """Unmark the DAG as favorite.""" + dag = session.get(DagModel, dag_id) + if not dag: + raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"DAG with id '{dag_id}' not found") + + user_id = 
str(user.get_id()) + + favorite_exists = session.execute( + select(DagFavorite).where( + DagFavorite.dag_id == dag_id, + DagFavorite.user_id == user_id, + ) + ).first() + + if not favorite_exists: + raise HTTPException(status.HTTP_409_CONFLICT, detail="DAG is not marked as favorite") + + session.execute( + delete(DagFavorite).where( + DagFavorite.dag_id == dag_id, + DagFavorite.user_id == user_id, + ) + ) + + @dags_router.delete( "/{dag_id}", responses=create_openapi_http_exception_doc( diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/extra_links.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/extra_links.py index f21be70388039..1be9f580b0621 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/extra_links.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/extra_links.py @@ -17,7 +17,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast from fastapi import Depends, HTTPException, status from sqlalchemy.sql import select @@ -31,7 +31,8 @@ from airflow.exceptions import TaskNotFound if TYPE_CHECKING: - from airflow.models import DAG + from airflow.models.mappedoperator import MappedOperator + from airflow.serialization.serialized_objects import SerializedBaseOperator extra_links_router = AirflowRouter( @@ -56,12 +57,12 @@ def get_extra_links( """Get extra links for task instance.""" from airflow.models.taskinstance import TaskInstance - dag: DAG = dag_bag.get_dag(dag_id) - if not dag: + if (dag := dag_bag.get_dag(dag_id)) is None: raise HTTPException(status.HTTP_404_NOT_FOUND, f"DAG with ID = {dag_id} not found") try: - task = dag.get_task(task_id) + # TODO (GH-52141): Make dag a db-backed object so it only returns db-backed tasks. + task = cast("MappedOperator | SerializedBaseOperator", dag.get_task(task_id)) except TaskNotFound: raise HTTPException(status.HTTP_404_NOT_FOUND, f"Task with ID = {task_id} not found") diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py new file mode 100644 index 0000000000000..78c7604b51677 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/hitl.py @@ -0,0 +1,274 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import structlog +from fastapi import Depends, HTTPException, status +from sqlalchemy import select + +from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity +from airflow.api_fastapi.common.db.common import SessionDep, paginated_select +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.datamodels.hitl import ( + HITLDetail, + HITLDetailCollection, + HITLDetailResponse, + UpdateHITLDetailPayload, +) +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.security import GetUserDep, ReadableTIFilterDep, requires_access_dag +from airflow.models.hitl import HITLDetail as HITLDetailModel +from airflow.models.taskinstance import TaskInstance as TI +from airflow.utils import timezone + +hitl_router = AirflowRouter(tags=["HumanInTheLoop"], prefix="/hitl-details") + +log = structlog.get_logger(__name__) + + +def _get_task_instance( + dag_id: str, + dag_run_id: str, + task_id: str, + session: SessionDep, + map_index: int | None = None, +) -> TI: + query = select(TI).where( + TI.dag_id == dag_id, + TI.run_id == dag_run_id, + TI.task_id == task_id, + ) + + if map_index is not None: + query = query.where(TI.map_index == map_index) + + task_instance = session.scalar(query) + if task_instance is None: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found", + ) + if map_index is None and task_instance.map_index != -1: + raise HTTPException( + status.HTTP_404_NOT_FOUND, "Task instance is mapped, add the map_index value to the URL" + ) + + return task_instance + + +def _update_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + update_hitl_detail_payload: UpdateHITLDetailPayload, + user: GetUserDep, + session: SessionDep, + map_index: int | None = None, +) -> HITLDetailResponse: + task_instance = _get_task_instance( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=map_index, + ) + ti_id_str = str(task_instance.id) + hitl_detail_model = session.scalar(select(HITLDetailModel).where(HITLDetailModel.ti_id == ti_id_str)) + if not hitl_detail_model: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"Human-in-the-loop detail does not exist for Task Instance with id {ti_id_str}", + ) + + if hitl_detail_model.response_received: + raise HTTPException( + status.HTTP_409_CONFLICT, + f"Human-in-the-loop detail has already been updated for Task Instance with id {ti_id_str} " + "and is not allowed to write again.", + ) + + hitl_detail_model.user_id = user.get_id() + hitl_detail_model.response_at = timezone.utcnow() + hitl_detail_model.chosen_options = update_hitl_detail_payload.chosen_options + hitl_detail_model.params_input = update_hitl_detail_payload.params_input + session.add(hitl_detail_model) + session.commit() + return HITLDetailResponse.model_validate(hitl_detail_model) + + +def _get_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + session: SessionDep, + map_index: int | None = None, +) -> HITLDetail: + """Get a Human-in-the-loop detail of a specific task instance.""" + task_instance = _get_task_instance( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=map_index, + ) + if task_instance is None: + raise HTTPException( + status.HTTP_404_NOT_FOUND, + f"The Task Instance with 
dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}` and map_index: `{map_index}` was not found", + ) + + ti_id_str = str(task_instance.id) + hitl_detail_model = session.scalar(select(HITLDetailModel).where(HITLDetailModel.ti_id == ti_id_str)) + if not hitl_detail_model: + log.error("Human-in-the-loop detail not found") + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail={ + "reason": "not_found", + "message": "Human-in-the-loop detail not found", + }, + ) + return HITLDetail.model_validate(hitl_detail_model) + + +@hitl_router.patch( + "/{dag_id}/{dag_run_id}/{task_id}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def update_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + update_hitl_detail_payload: UpdateHITLDetailPayload, + user: GetUserDep, + session: SessionDep, +) -> HITLDetailResponse: + """Update a Human-in-the-loop detail.""" + return _update_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + update_hitl_detail_payload=update_hitl_detail_payload, + user=user, + map_index=None, + ) + + +@hitl_router.patch( + "/{dag_id}/{dag_run_id}/{task_id}/{map_index}", + responses=create_openapi_http_exception_doc( + [ + status.HTTP_404_NOT_FOUND, + status.HTTP_409_CONFLICT, + ] + ), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def update_mapped_ti_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + update_hitl_detail_payload: UpdateHITLDetailPayload, + user: GetUserDep, + session: SessionDep, + map_index: int, +) -> HITLDetailResponse: + """Update a Human-in-the-loop detail.""" + return _update_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + update_hitl_detail_payload=update_hitl_detail_payload, + user=user, + map_index=map_index, + ) + + +@hitl_router.get( + "/{dag_id}/{dag_run_id}/{task_id}", + status_code=status.HTTP_200_OK, + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def get_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + session: SessionDep, +) -> HITLDetail: + """Get a Human-in-the-loop detail of a specific task instance.""" + return _get_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=None, + ) + + +@hitl_router.get( + "/{dag_id}/{dag_run_id}/{task_id}/{map_index}", + status_code=status.HTTP_200_OK, + responses=create_openapi_http_exception_doc([status.HTTP_404_NOT_FOUND]), + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def get_mapped_ti_hitl_detail( + dag_id: str, + dag_run_id: str, + task_id: str, + session: SessionDep, + map_index: int, +) -> HITLDetail: + """Get a Human-in-the-loop detail of a specific task instance.""" + return _get_hitl_detail( + dag_id=dag_id, + dag_run_id=dag_run_id, + task_id=task_id, + session=session, + map_index=map_index, + ) + + +@hitl_router.get( + "/", + status_code=status.HTTP_200_OK, + dependencies=[Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE))], +) +def get_hitl_details( + readable_ti_filter: ReadableTIFilterDep, + session: SessionDep, +) -> 
HITLDetailCollection: + """Get Human-in-the-loop details.""" + query = select(HITLDetailModel).join(TI, HITLDetailModel.ti_id == TI.id) + hitl_detail_select, total_entries = paginated_select( + statement=query, + filters=[readable_ti_filter], + session=session, + ) + hitl_details = session.scalars(hitl_detail_select) + return HITLDetailCollection( + hitl_details=hitl_details, + total_entries=total_entries, + ) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/log.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/log.py index 01cf859f05efd..688a563d2b6d4 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/public/log.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/public/log.py @@ -20,9 +20,10 @@ import contextlib import textwrap -from fastapi import Depends, HTTPException, Request, Response, status +from fastapi import Depends, HTTPException, Request, status +from fastapi.responses import StreamingResponse from itsdangerous import BadSignature, URLSafeSerializer -from pydantic import PositiveInt +from pydantic import NonNegativeInt, PositiveInt from sqlalchemy.orm import joinedload from sqlalchemy.sql import select @@ -75,7 +76,7 @@ def get_log( dag_id: str, dag_run_id: str, task_id: str, - try_number: PositiveInt, + try_number: NonNegativeInt, accept: HeaderAcceptJsonOrNdjson, request: Request, dag_bag: DagBagDep, @@ -120,12 +121,17 @@ def get_log( ) ti = session.scalar(query) if ti is None: - query = select(TaskInstanceHistory).where( - TaskInstanceHistory.task_id == task_id, - TaskInstanceHistory.dag_id == dag_id, - TaskInstanceHistory.run_id == dag_run_id, - TaskInstanceHistory.map_index == map_index, - TaskInstanceHistory.try_number == try_number, + query = ( + select(TaskInstanceHistory) + .where( + TaskInstanceHistory.task_id == task_id, + TaskInstanceHistory.dag_id == dag_id, + TaskInstanceHistory.run_id == dag_run_id, + TaskInstanceHistory.map_index == map_index, + TaskInstanceHistory.try_number == try_number, + ) + .options(joinedload(TaskInstanceHistory.dag_run)) + # we need to joinedload the dag_run, since FileTaskHandler._render_filename needs ti.dag_run ) ti = session.scalar(query) @@ -138,24 +144,27 @@ def get_log( with contextlib.suppress(TaskNotFound): ti.task = dag.get_task(ti.task_id) - if accept == Mimetype.JSON or accept == Mimetype.ANY: # default - logs, metadata = task_log_reader.read_log_chunks(ti, try_number, metadata) - encoded_token = None + if accept == Mimetype.NDJSON: # only specified application/x-ndjson will return streaming response + # LogMetadata(TypedDict) is used as type annotation for log_reader; added ignore to suppress mypy error + log_stream = task_log_reader.read_log_stream(ti, try_number, metadata) # type: ignore[arg-type] + headers = None if not metadata.get("end_of_log", False): - encoded_token = URLSafeSerializer(request.app.state.secret_key).dumps(metadata) - return TaskInstancesLogResponse.model_construct(continuation_token=encoded_token, content=logs) - # text/plain, or something else we don't understand. Return raw log content - - # We need to exhaust the iterator before we can generate the continuation token. 
- # We could improve this by making it a streaming/async response, and by then setting the header using - # HTTP Trailers - logs = "".join(task_log_reader.read_log_stream(ti, try_number, metadata)) - headers = None - if not metadata.get("end_of_log", False): - headers = { - "Airflow-Continuation-Token": URLSafeSerializer(request.app.state.secret_key).dumps(metadata) - } - return Response(media_type="application/x-ndjson", content=logs, headers=headers) + headers = { + "Airflow-Continuation-Token": URLSafeSerializer(request.app.state.secret_key).dumps(metadata) + } + return StreamingResponse(media_type="application/x-ndjson", content=log_stream, headers=headers) + + # application/json, or something else we don't understand. + # Return JSON format, which is easier for users to debug. + + # LogMetadata(TypedDict) is used as type annotation for log_reader; added ignore to suppress mypy error + structured_log_stream, out_metadata = task_log_reader.read_log_chunks(ti, try_number, metadata) # type: ignore[arg-type] + encoded_token = None + if not out_metadata.get("end_of_log", False): + encoded_token = URLSafeSerializer(request.app.state.secret_key).dumps(out_metadata) + return TaskInstancesLogResponse.model_construct( + continuation_token=encoded_token, content=list(structured_log_stream) + ) @task_instances_log_router.get( diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/__init__.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/__init__.py index f7b19c53ce2f5..677776574e80d 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/__init__.py @@ -20,6 +20,7 @@ from airflow.api_fastapi.core_api.routes.ui.assets import assets_router from airflow.api_fastapi.core_api.routes.ui.auth import auth_router from airflow.api_fastapi.core_api.routes.ui.backfills import backfills_router +from airflow.api_fastapi.core_api.routes.ui.calendar import calendar_router from airflow.api_fastapi.core_api.routes.ui.config import config_router from airflow.api_fastapi.core_api.routes.ui.connections import connections_router from airflow.api_fastapi.core_api.routes.ui.dags import dags_router @@ -40,3 +41,4 @@ ui_router.include_router(structure_router) ui_router.include_router(backfills_router) ui_router.include_router(grid_router) +ui_router.include_router(calendar_router) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/calendar.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/calendar.py new file mode 100644 index 0000000000000..cdb1902904bf2 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/calendar.py @@ -0,0 +1,70 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
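Taken together, the `get_log` changes above make the response shape follow the `Accept` header: `application/x-ndjson` now streams log lines and signals an unfinished log through the `Airflow-Continuation-Token` response header, while JSON returns structured content plus a `continuation_token` field. A minimal client sketch of both modes; the base URL, bearer token, and path identifiers are placeholders, not part of this diff:

```python
import requests

BASE = "http://localhost:8080/api/v2"  # placeholder API server location
AUTH = {"Authorization": "Bearer <token>"}  # placeholder credentials
url = f"{BASE}/dags/my_dag/dagRuns/my_run/taskInstances/my_task/logs/1"

# JSON mode: structured log events plus an optional continuation_token field.
resp = requests.get(url, headers={**AUTH, "Accept": "application/json"})
payload = resp.json()
token = payload.get("continuation_token")  # feed back on the next poll if the log is unfinished

# NDJSON mode: lines arrive incrementally; an unfinished log is signalled
# through a response header instead of a body field.
with requests.get(url, headers={**AUTH, "Accept": "application/x-ndjson"}, stream=True) as stream:
    for line in stream.iter_lines():
        print(line.decode())
    token = stream.headers.get("Airflow-Continuation-Token")
```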
+from __future__ import annotations + +from typing import Annotated, Literal + +from fastapi import Depends + +from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity +from airflow.api_fastapi.common.dagbag import DagBagDep +from airflow.api_fastapi.common.db.common import SessionDep +from airflow.api_fastapi.common.parameters import RangeFilter, datetime_range_filter_factory +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.datamodels.ui.calendar import CalendarTimeRangeCollectionResponse +from airflow.api_fastapi.core_api.security import requires_access_dag +from airflow.api_fastapi.core_api.services.ui.calendar import CalendarService +from airflow.models.dagrun import DagRun + +calendar_router = AirflowRouter(prefix="/calendar", tags=["Calendar"]) + + +@calendar_router.get( + "/{dag_id}", + dependencies=[ + Depends( + requires_access_dag( + method="GET", + access_entity=DagAccessEntity.TASK_INSTANCE, + ) + ), + Depends( + requires_access_dag( + method="GET", + access_entity=DagAccessEntity.RUN, + ) + ), + ], +) +def get_calendar( + dag_id: str, + session: SessionDep, + dag_bag: DagBagDep, + logical_date: Annotated[RangeFilter, Depends(datetime_range_filter_factory("logical_date", DagRun))], + granularity: Literal["hourly", "daily"] = "daily", +) -> CalendarTimeRangeCollectionResponse: + """Get calendar data for a DAG including historical and planned DAG runs.""" + dag = dag_bag.get_dag(dag_id) + calendar_service = CalendarService() + + return calendar_service.get_calendar_data( + dag_id=dag_id, + session=session, + dag=dag, + logical_date=logical_date, + granularity=granularity, + ) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dags.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dags.py index ad019089640e6..56b4d76a27405 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dags.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/dags.py @@ -34,6 +34,7 @@ QueryDagDisplayNamePatternSearch, QueryDagIdPatternSearch, QueryExcludeStaleFilter, + QueryFavoriteFilter, QueryLastDagRunStateFilter, QueryLimit, QueryOffset, @@ -92,6 +93,7 @@ def get_dags( ).dynamic_depends() ), ], + is_favorite: QueryFavoriteFilter, readable_dags_filter: ReadableDagsFilterDep, session: SessionDep, dag_runs_limit: int = 10, @@ -116,6 +118,7 @@ def get_dags( tags, owners, last_dag_run_state, + is_favorite, readable_dags_filter, ], order_by=order_by, diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/grid.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/grid.py index 095f0f6cce7b6..5c7494d3b704c 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/grid.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/grid.py @@ -18,23 +18,15 @@ from __future__ import annotations import collections -import itertools from typing import TYPE_CHECKING, Annotated import structlog from fastapi import Depends, HTTPException, status from sqlalchemy import select -from sqlalchemy.orm import joinedload, selectinload -from airflow import DAG from airflow.api_fastapi.auth.managers.models.resource_details import DagAccessEntity -from airflow.api_fastapi.common.dagbag import DagBagDep from airflow.api_fastapi.common.db.common import SessionDep, paginated_select from airflow.api_fastapi.common.parameters import ( - QueryDagRunRunTypesFilter, - QueryDagRunStateFilter, - QueryIncludeDownstream, - QueryIncludeUpstream, QueryLimit, QueryOffset, 
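With the router mounted, the calendar endpoint serves per-bucket run counts for a single DAG. A hedged sketch of querying it from Python; the host and auth are placeholders, and the `_gte`/`_lte` parameter names follow the range-filter convention used elsewhere in this API, so they should be checked against the generated OpenAPI spec:

```python
import requests

resp = requests.get(
    "http://localhost:8080/ui/calendar/my_dag",  # placeholder host; the router prefix is /calendar
    params={
        "granularity": "hourly",  # "daily" is the default
        "logical_date_gte": "2025-01-01T00:00:00Z",
        "logical_date_lte": "2025-01-31T23:59:59Z",
    },
    headers={"Authorization": "Bearer <token>"},  # placeholder credentials
)
for bucket in resp.json()["dag_runs"]:
    # Each entry is a CalendarTimeRangeResponse: a truncated timestamp, a state
    # (a run state for history, "planned" for future runs), and a count.
    print(bucket["date"], bucket["state"], bucket["count"])
```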
RangeFilter, @@ -48,8 +40,6 @@ LatestRunResponse, ) from airflow.api_fastapi.core_api.datamodels.ui.grid import ( - GridDAGRunwithTIs, - GridResponse, GridTISummaries, ) from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc @@ -57,16 +47,11 @@ from airflow.api_fastapi.core_api.services.ui.grid import ( _find_aggregates, _merge_node_dicts, - fill_task_instance_summaries, - get_child_task_map, - get_task_group_map, ) from airflow.models.dag_version import DagVersion from airflow.models.dagrun import DagRun from airflow.models.serialized_dag import SerializedDagModel from airflow.models.taskinstance import TaskInstance -from airflow.models.taskinstancehistory import TaskInstanceHistory -from airflow.utils.state import TaskInstanceState from airflow.utils.task_group import ( get_task_group_children_getter, task_group_to_dict_grid, @@ -76,201 +61,6 @@ grid_router = AirflowRouter(prefix="/grid", tags=["Grid"]) -@grid_router.get( - "/{dag_id}", - responses=create_openapi_http_exception_doc([status.HTTP_400_BAD_REQUEST, status.HTTP_404_NOT_FOUND]), - dependencies=[ - Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.TASK_INSTANCE)), - Depends(requires_access_dag(method="GET", access_entity=DagAccessEntity.RUN)), - ], - response_model_exclude_none=True, -) -def grid_data( - dag_id: str, - session: SessionDep, - offset: QueryOffset, - dag_bag: DagBagDep, - run_type: QueryDagRunRunTypesFilter, - state: QueryDagRunStateFilter, - limit: QueryLimit, - order_by: Annotated[ - SortParam, - Depends(SortParam(["run_after", "logical_date", "start_date", "end_date"], DagRun).dynamic_depends()), - ], - run_after: Annotated[RangeFilter, Depends(datetime_range_filter_factory("run_after", DagRun))], - logical_date: Annotated[RangeFilter, Depends(datetime_range_filter_factory("logical_date", DagRun))], - include_upstream: QueryIncludeUpstream = False, - include_downstream: QueryIncludeDownstream = False, - root: str | None = None, -) -> GridResponse: - """Return grid data.""" - dag: DAG = dag_bag.get_dag(dag_id) - if not dag: - raise HTTPException(status.HTTP_404_NOT_FOUND, f"Dag with id {dag_id} was not found") - - # Retrieve, sort the previous DAG Runs - base_query = ( - select(DagRun) - .join(DagRun.dag_run_note, isouter=True) - .options(joinedload(DagRun.task_instances).joinedload(TaskInstance.dag_version)) - .options(joinedload(DagRun.task_instances_histories).joinedload(TaskInstanceHistory.dag_version)) - .where(DagRun.dag_id == dag.dag_id) - ) - - # This comparison is to falls to DAG timetable when no order_by is provided - if order_by.value == order_by.get_primary_key_string(): - order_by = SortParam( - allowed_attrs=[run_ordering for run_ordering in dag.timetable.run_ordering], model=DagRun - ).set_value(dag.timetable.run_ordering[0]) - - dag_runs_select_filter, _ = paginated_select( - statement=base_query, - filters=[ - run_type, - state, - run_after, - logical_date, - ], - order_by=order_by, - offset=offset, - limit=limit, - ) - - dag_runs = list(session.scalars(dag_runs_select_filter).unique()) - # Check if there are any DAG Runs with given criteria to eliminate unnecessary queries/errors - if not dag_runs: - return GridResponse(dag_runs=[]) - - # Retrieve, sort and encode the Task Instances - tis_of_dag_runs, _ = paginated_select( - statement=select(TaskInstance) - .options(selectinload(TaskInstance.task_instance_note)) - .where(TaskInstance.dag_id == dag.dag_id) - .where(TaskInstance.run_id.in_([dag_run.run_id for dag_run in dag_runs])), - 
filters=[], - order_by=SortParam(allowed_attrs=["task_id", "run_id"], model=TaskInstance).set_value("task_id"), - offset=offset, - limit=None, - ) - - task_instances = session.scalars(tis_of_dag_runs) - - tis_by_run_id: dict[str, list[TaskInstance]] = collections.defaultdict(list) - for ti in task_instances: - tis_by_run_id[ti.run_id].append(ti) - - # Generate Grouped Task Instances - task_node_map_exclude = None - if root: - task_node_map_exclude = get_task_group_map( - dag=dag.partial_subset( - task_ids=root, - include_upstream=include_upstream, - include_downstream=include_downstream, - ) - ) - - # Group the Task Instances by Parent Task (TaskGroup or Mapped) and All Task Instances - parent_tis: dict[tuple[str, str], list] = collections.defaultdict(list) - all_tis: dict[tuple[str, str], list] = collections.defaultdict(list) - - for tis in tis_by_run_id.values(): - # this is a simplification - we account for structure based on the first task - version = tis[0].dag_version - if not version: - version = session.scalar( - select(DagVersion) - .where( - DagVersion.dag_id == tis[0].dag_id, - ) - .order_by(DagVersion.id) # ascending cus this is mostly for pre-3.0 upgrade - .limit(1) - ) - if not version.serialized_dag: - log.error( - "No serialized dag found", - dag_id=tis[0].dag_id, - version_id=version.id, - version_number=version.version_number, - ) - continue - run_dag = version.serialized_dag.dag - task_node_map = get_task_group_map(dag=run_dag) - for ti in tis: - # Skip the Task Instances if upstream/downstream filtering is applied or if the task was removed. - if ( - task_node_map_exclude and ti.task_id not in task_node_map_exclude - ) or ti.state == TaskInstanceState.REMOVED: - continue - - # Populate the Grouped Task Instances (All Task Instances except the Parent Task Instances) - if ti.task_id in get_child_task_map( - parent_task_id=task_node_map[ti.task_id]["parent_id"], task_node_map=task_node_map - ): - all_tis[(ti.task_id, ti.run_id)].append(ti) - # Populate the Parent Task Instances - parent_id = task_node_map[ti.task_id]["parent_id"] - if not parent_id and task_node_map[ti.task_id]["is_group"]: - parent_tis[(ti.task_id, ti.run_id)].append(ti) - elif parent_id and task_node_map[parent_id]["is_group"]: - parent_tis[(parent_id, ti.run_id)].append(ti) - - # Clear task_node_map_exclude to free up memory - if task_node_map_exclude: - task_node_map_exclude.clear() - - task_node_map = get_task_group_map(dag=dag) - # Extend subgroup task instances to parent task instances to calculate the aggregates states - task_group_map = {k: v for k, v in task_node_map.items() if v["is_group"]} - parent_tis.update( - { - (task_id_parent, run_id): parent_tis[(task_id_parent, run_id)] + parent_tis[(task_id, run_id)] - for task_id, task_map in task_group_map.items() - if task_map["is_group"] - for (task_id_parent, run_id), tis in list(parent_tis.items()) - if task_id_parent == task_map["parent_id"] - } - ) - # Create the Task Instance Summaries to be used in the Grid Response - task_instance_summaries: dict[str, list] = { - run_id: [] for _, run_id in itertools.chain(parent_tis, all_tis) - } - - # Fill the Task Instance Summaries for the Parent and Grouped Task Instances. 
- # First the Parent Task Instances because they are used in the Grouped Task Instances - fill_task_instance_summaries( - grouped_task_instances=parent_tis, - task_instance_summaries_to_fill=task_instance_summaries, - session=session, - ) - # Fill the Task Instance Summaries for the Grouped Task Instances - fill_task_instance_summaries( - grouped_task_instances=all_tis, - task_instance_summaries_to_fill=task_instance_summaries, - session=session, - ) - - # Aggregate the Task Instances by DAG Run - grid_dag_runs = [ - GridDAGRunwithTIs( - run_id=dag_run.run_id, - queued_at=dag_run.queued_at, - start_date=dag_run.start_date, - end_date=dag_run.end_date, - run_after=dag_run.run_after, - logical_date=dag_run.logical_date, - state=dag_run.state, - run_type=dag_run.run_type, - data_interval_start=dag_run.data_interval_start, - data_interval_end=dag_run.data_interval_end, - note=dag_run.note, - task_instances=task_instance_summaries.get(dag_run.run_id, []), - ) - for dag_run in dag_runs - ] - return GridResponse(dag_runs=grid_dag_runs) - - def _get_latest_serdag(dag_id, session): serdag = session.scalar( select(SerializedDagModel) @@ -480,7 +270,6 @@ def get_grid_runs( ) ), ], - response_model_exclude_none=True, ) def get_grid_ti_summaries( dag_id: str, diff --git a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/structure.py b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/structure.py index 738e4c6edf65f..fff7325f41aba 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/structure.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/routes/ui/structure.py @@ -141,7 +141,7 @@ def structure_data( } ) - if asset_expression := serialized_dag.dag_model.asset_expression: + if (asset_expression := serialized_dag.dag_model.asset_expression) and entry_node_ref: upstream_asset_nodes, upstream_asset_edges = get_upstream_assets( asset_expression, entry_node_ref["id"] ) diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/public/dag_run.py b/airflow-core/src/airflow/api_fastapi/core_api/services/public/dag_run.py new file mode 100644 index 0000000000000..259389e799494 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/public/dag_run.py @@ -0,0 +1,85 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
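The `structure.py` change above is a one-line hardening: upstream asset nodes are now resolved only when an entry node reference actually exists, since `entry_node_ref["id"]` would otherwise fail on a graph with no entry node. The pattern in isolation, with illustrative stand-in names and data:

```python
def resolve_upstream_assets(dag_model: dict, entry_node_ref: dict | None) -> list[dict]:
    """Stand-in for the guarded branch in structure_data (names are illustrative)."""
    # The walrus binds asset_expression for the body, and the `and` short-circuits
    # before entry_node_ref["id"] is ever evaluated on a missing entry node.
    if (asset_expression := dag_model.get("asset_expression")) and entry_node_ref:
        return [{"expr": asset_expression, "target": entry_node_ref["id"]}]
    return []


print(resolve_upstream_assets({"asset_expression": {"any": []}}, None))             # [] -- no crash
print(resolve_upstream_assets({"asset_expression": {"any": []}}, {"id": "start"}))  # one synthetic edge
```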
+ +from __future__ import annotations + +import asyncio +import itertools +import json +import operator +from typing import TYPE_CHECKING, Any + +import attrs +from sqlalchemy import select + +from airflow.models.dagrun import DagRun +from airflow.models.xcom import XCOM_RETURN_KEY, XComModel +from airflow.utils.session import create_session_async +from airflow.utils.state import State + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Iterator + + +@attrs.define +class DagRunWaiter: + """Wait for the specified dag run to finish, and collect info from it.""" + + dag_id: str + run_id: str + interval: float + result_task_ids: list[str] | None + + async def _get_dag_run(self) -> DagRun: + async with create_session_async() as session: + return await session.scalar(select(DagRun).filter_by(dag_id=self.dag_id, run_id=self.run_id)) + + def _serialize_xcoms(self) -> dict[str, Any]: + xcom_query = XComModel.get_many( + run_id=self.run_id, + key=XCOM_RETURN_KEY, + task_ids=self.result_task_ids, + dag_ids=self.dag_id, + ) + xcom_query = xcom_query.order_by(XComModel.task_id, XComModel.map_index) + + def _group_xcoms(g: Iterator[XComModel]) -> Any: + entries = list(g) + if len(entries) == 1 and entries[0].map_index < 0: # Unpack non-mapped task xcom. + return entries[0].value + return [entry.value for entry in entries] # Task is mapped; return all xcoms in a list. + + return { + task_id: _group_xcoms(g) + for task_id, g in itertools.groupby(xcom_query, key=operator.attrgetter("task_id")) + } + + def _serialize_response(self, dag_run: DagRun) -> str: + resp = {"state": dag_run.state} + if dag_run.state not in State.finished_dr_states: + return json.dumps(resp) + if self.result_task_ids: + resp["results"] = self._serialize_xcoms() + return json.dumps(resp) + + async def wait(self) -> AsyncGenerator[str, None]: + yield self._serialize_response(dag_run := await self._get_dag_run()) + yield "\n" + while dag_run.state not in State.finished_dr_states: + await asyncio.sleep(self.interval) + yield self._serialize_response(dag_run := await self._get_dag_run()) + yield "\n" diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/calendar.py b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/calendar.py new file mode 100644 index 0000000000000..de51a1dd27f49 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/calendar.py @@ -0,0 +1,325 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
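`DagRunWaiter.wait()` above is an async generator that emits one newline-terminated JSON document per poll until the run reaches a finished state, so it plugs directly into NDJSON streaming. A sketch of one possible consumer; the route path and app wiring are assumptions for illustration, not code from this diff:

```python
from fastapi import FastAPI
from fastapi.responses import StreamingResponse

from airflow.api_fastapi.core_api.services.public.dag_run import DagRunWaiter

app = FastAPI()


@app.get("/dags/{dag_id}/runs/{run_id}/wait")  # hypothetical route for illustration
async def wait_for_dag_run(dag_id: str, run_id: str, interval: float = 5.0):
    waiter = DagRunWaiter(dag_id=dag_id, run_id=run_id, interval=interval, result_task_ids=None)
    # Each chunk is `{"state": ...}` (plus "results" once finished, when task ids
    # were requested) followed by "\n" -- NDJSON framing for incremental clients.
    return StreamingResponse(waiter.wait(), media_type="application/x-ndjson")
```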
+from __future__ import annotations + +import collections +import datetime +from collections.abc import Iterator +from typing import Literal, cast + +import sqlalchemy as sa +import structlog +from croniter.croniter import croniter +from pendulum import DateTime +from sqlalchemy.engine import Row +from sqlalchemy.orm import Session + +from airflow.api_fastapi.common.parameters import RangeFilter +from airflow.api_fastapi.core_api.datamodels.ui.calendar import ( + CalendarTimeRangeCollectionResponse, + CalendarTimeRangeResponse, +) +from airflow.models.dag import DAG +from airflow.models.dagrun import DagRun +from airflow.timetables._cron import CronMixin +from airflow.timetables.base import DataInterval, TimeRestriction +from airflow.timetables.simple import ContinuousTimetable +from airflow.utils import timezone + +log = structlog.get_logger(logger_name=__name__) + + +class CalendarService: + """Service class for calendar-related operations.""" + + MAX_PLANNED_RUNS: int = 2000 + + def get_calendar_data( + self, + dag_id: str, + session: Session, + dag: DAG, + logical_date: RangeFilter, + granularity: Literal["hourly", "daily"] = "daily", + ) -> CalendarTimeRangeCollectionResponse: + """ + Get calendar data for a DAG including historical and planned runs. + + Args: + dag_id: The DAG ID + session: Database session + dag: The DAG object + logical_date: Date range filter + granularity: Time granularity ("hourly" or "daily") + + Returns: + List of calendar time range results + """ + historical_data, raw_dag_states = self._get_historical_dag_runs( + dag_id, + session, + logical_date, + granularity, + ) + + planned_data = self._get_planned_dag_runs(dag, raw_dag_states, logical_date, granularity) + + all_data = historical_data + planned_data + return CalendarTimeRangeCollectionResponse( + total_entries=len(all_data), + dag_runs=all_data, + ) + + def _get_historical_dag_runs( + self, + dag_id: str, + session: Session, + logical_date: RangeFilter, + granularity: Literal["hourly", "daily"], + ) -> tuple[list[CalendarTimeRangeResponse], list[Row]]: + """Get historical DAG runs from the database.""" + dialect = session.bind.dialect.name + + time_expression = self._get_time_truncation_expression(DagRun.logical_date, granularity, dialect) + + select_stmt = ( + sa.select( + time_expression.label("datetime"), + DagRun.state, + sa.func.max(DagRun.data_interval_start).label("data_interval_start"), + sa.func.max(DagRun.data_interval_end).label("data_interval_end"), + sa.func.count("*").label("count"), + ) + .where(DagRun.dag_id == dag_id) + .group_by(time_expression, DagRun.state) + .order_by(time_expression.asc()) + ) + + select_stmt = logical_date.to_orm(select_stmt) + dag_states = session.execute(select_stmt).all() + + calendar_results = [ + CalendarTimeRangeResponse( + # ds.datetime in sqlite and mysql is a string, in postgresql it is a datetime + date=ds.datetime, + state=ds.state, + count=ds.count, + ) + for ds in dag_states + ] + + return calendar_results, dag_states + + def _get_planned_dag_runs( + self, + dag: DAG, + raw_dag_states: list[Row], + logical_date: RangeFilter, + granularity: Literal["hourly", "daily"], + ) -> list[CalendarTimeRangeResponse]: + """Get planned DAG runs based on the DAG's timetable.""" + if not self._should_calculate_planned_runs(dag, raw_dag_states): + return [] + + last_data_interval = self._get_last_data_interval(raw_dag_states) + if not last_data_interval: + return [] + + year = last_data_interval.end.year + restriction = TimeRestriction( + 
timezone.coerce_datetime(dag.start_date) if dag.start_date else None, + timezone.coerce_datetime(dag.end_date) if dag.end_date else None, + False, + ) + + if isinstance(dag.timetable, CronMixin): + return self._calculate_cron_planned_runs(dag, last_data_interval, year, logical_date, granularity) + return self._calculate_timetable_planned_runs( + dag, last_data_interval, year, restriction, logical_date, granularity + ) + + def _should_calculate_planned_runs(self, dag: DAG, raw_dag_states: list[Row]) -> bool: + """Check if we should calculate planned runs.""" + return ( + bool(raw_dag_states) + and bool(raw_dag_states[-1].data_interval_start) + and bool(raw_dag_states[-1].data_interval_end) + and not isinstance(dag.timetable, ContinuousTimetable) + ) + + def _get_last_data_interval(self, raw_dag_states: list[Row]) -> DataInterval | None: + """Extract the last data interval from raw database results.""" + if not raw_dag_states: + return None + + last_state = raw_dag_states[-1] + if not (last_state.data_interval_start and last_state.data_interval_end): + return None + + return DataInterval( + timezone.coerce_datetime(last_state.data_interval_start), + timezone.coerce_datetime(last_state.data_interval_end), + ) + + def _calculate_cron_planned_runs( + self, + dag: DAG, + last_data_interval: DataInterval, + year: int, + logical_date: RangeFilter, + granularity: Literal["hourly", "daily"], + ) -> list[CalendarTimeRangeResponse]: + """Calculate planned runs for cron-based timetables.""" + dates: dict[datetime.datetime, int] = collections.Counter() + + dates_iter: Iterator[datetime.datetime | None] = croniter( + cast("CronMixin", dag.timetable)._expression, + start_time=last_data_interval.end, + ret_type=datetime.datetime, + ) + + for dt in dates_iter: + if dt is None or dt.year != year: + break + if dag.end_date and dt > dag.end_date: + break + if not self._is_date_in_range(dt, logical_date): + continue + + dates[self._truncate_datetime_for_granularity(dt, granularity)] += 1 + + return [ + CalendarTimeRangeResponse(date=dt, state="planned", count=count) for dt, count in dates.items() + ] + + def _calculate_timetable_planned_runs( + self, + dag: DAG, + last_data_interval: DataInterval, + year: int, + restriction: TimeRestriction, + logical_date: RangeFilter, + granularity: Literal["hourly", "daily"], + ) -> list[CalendarTimeRangeResponse]: + """Calculate planned runs for generic timetables.""" + dates: dict[datetime.datetime, int] = collections.Counter() + prev_logical_date = DateTime.min + total_planned = 0 + + while total_planned < self.MAX_PLANNED_RUNS: + curr_info = dag.timetable.next_dagrun_info( + last_automated_data_interval=last_data_interval, + restriction=restriction, + ) + + if curr_info is None: # No more DAG runs to schedule + break + if curr_info.logical_date <= prev_logical_date: # Timetable not progressing, stopping + break + if curr_info.logical_date.year != year: # Crossed year boundary + break + + if not self._is_date_in_range(curr_info.logical_date, logical_date): + last_data_interval = curr_info.data_interval + prev_logical_date = curr_info.logical_date + total_planned += 1 + continue + + last_data_interval = curr_info.data_interval + dt = self._truncate_datetime_for_granularity(curr_info.logical_date, granularity) + dates[dt] += 1 + prev_logical_date = curr_info.logical_date + total_planned += 1 + + return [ + CalendarTimeRangeResponse(date=dt, state="planned", count=count) for dt, count in dates.items() + ] + + def _get_time_truncation_expression( + self, + column: sa.Column, 
+ granularity: Literal["hourly", "daily"], + dialect: str, + ) -> sa.Column: + """ + Get database-specific time truncation expression for SQLAlchemy. + + We always want to return a timestamp for both hourly and daily truncation. + Unfortunately different databases have different functions for truncating datetime, so we need to handle + them separately. + + Args: + column: The datetime column to truncate + granularity: Either "hourly" or "daily" + dialect: Database dialect ("postgresql", "mysql", "sqlite") + + Returns: + SQLAlchemy expression for time truncation + + Raises: + ValueError: If the dialect is not supported + """ + if granularity == "hourly": + if dialect == "postgresql": + expression = sa.func.date_trunc("hour", column) + elif dialect == "mysql": + expression = sa.func.date_format(column, "%Y-%m-%dT%H:00:00Z") + elif dialect == "sqlite": + expression = sa.func.strftime("%Y-%m-%dT%H:00:00Z", column) + else: + raise ValueError(f"Unsupported dialect: {dialect}") + else: + if dialect == "postgresql": + expression = sa.func.timezone("UTC", sa.func.cast(sa.func.cast(column, sa.Date), sa.DateTime)) + elif dialect == "mysql": + expression = sa.func.date_format(column, "%Y-%m-%dT%00:00:00Z") + elif dialect == "sqlite": + expression = sa.func.strftime("%Y-%m-%dT00:00:00Z", column) + else: + raise ValueError(f"Unsupported dialect: {dialect}") + return expression + + def _truncate_datetime_for_granularity( + self, + dt: datetime.datetime, + granularity: Literal["hourly", "daily"], + ) -> datetime.datetime: + """ + Truncate datetime based on granularity for planned tasks grouping. + + Args: + dt: The datetime to truncate + granularity: Either "hourly" or "daily" + + Returns: + Truncated datetime + """ + if granularity == "hourly": + return dt.replace(minute=0, second=0, microsecond=0) + return dt.replace(hour=0, minute=0, second=0, microsecond=0) + + def _is_date_in_range(self, dt: datetime.datetime, logical_date: RangeFilter) -> bool: + """Check if a date is within the specified range filter.""" + if not logical_date.value: + return True + + if logical_date.value.lower_bound and dt < logical_date.value.lower_bound: + return False + if logical_date.value.upper_bound and dt > logical_date.value.upper_bound: + return False + + return True diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py index a15ae5a3703e2..3bf22517da246 100644 --- a/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py +++ b/airflow-core/src/airflow/api_fastapi/core_api/services/ui/grid.py @@ -17,289 +17,28 @@ from __future__ import annotations -import contextlib from collections import Counter from collections.abc import Iterable -from uuid import UUID import structlog -from sqlalchemy import select -from typing_extensions import Any -from airflow import DAG -from airflow.api_fastapi.common.db.common import SessionDep -from airflow.api_fastapi.common.parameters import ( - state_priority, -) -from airflow.api_fastapi.core_api.datamodels.ui.grid import ( - GridTaskInstanceSummary, -) -from airflow.api_fastapi.core_api.datamodels.ui.structure import ( - StructureDataResponse, -) -from airflow.models.baseoperator import BaseOperator as DBBaseOperator -from airflow.models.dag_version import DagVersion +from airflow.api_fastapi.common.parameters import state_priority from airflow.models.taskmap import TaskMap -from airflow.sdk import BaseOperator -from airflow.sdk.definitions._internal.abstractoperator import NotMapped
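Both the dialect-specific SQL and `_truncate_datetime_for_granularity` encode one bucketing rule: collapse each timestamp to the top of its hour or day, then count per bucket. A pure-Python restatement with a worked example, for intuition only:

```python
import collections
import datetime


def truncate(dt: datetime.datetime, granularity: str) -> datetime.datetime:
    # Same rule as _truncate_datetime_for_granularity above.
    if granularity == "hourly":
        return dt.replace(minute=0, second=0, microsecond=0)
    return dt.replace(hour=0, minute=0, second=0, microsecond=0)


runs = [
    datetime.datetime(2025, 8, 1, 9, 15),
    datetime.datetime(2025, 8, 1, 9, 45),
    datetime.datetime(2025, 8, 1, 10, 5),
]
# The Counter mirrors the SQL COUNT(*) ... GROUP BY date_trunc('hour', logical_date)
# used for history, and the Counter the service keeps for planned runs.
print(collections.Counter(truncate(dt, "hourly") for dt in runs))
# Counter({datetime.datetime(2025, 8, 1, 9, 0): 2, datetime.datetime(2025, 8, 1, 10, 0): 1})
```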
-from airflow.sdk.definitions._internal.expandinput import NotFullyPopulated from airflow.sdk.definitions.mappedoperator import MappedOperator from airflow.sdk.definitions.taskgroup import MappedTaskGroup, TaskGroup -from airflow.serialization.serialized_objects import SerializedDAG -from airflow.utils.state import TaskInstanceState -from airflow.utils.task_group import get_task_group_children_getter, task_group_to_dict +from airflow.serialization.serialized_objects import SerializedBaseOperator +from airflow.utils.task_group import get_task_group_children_getter log = structlog.get_logger(logger_name=__name__) -def get_task_group_map(dag: DAG) -> dict[str, dict[str, Any]]: - """ - Get the Task Group Map for the DAG. - - :param dag: DAG - - :return: Task Group Map - """ - task_nodes: dict[str, dict[str, Any]] = {} - - def _is_task_node_mapped_task_group(task_node: BaseOperator | MappedTaskGroup | TaskMap | None) -> bool: - """Check if the Task Node is a Mapped Task Group.""" - return type(task_node) is MappedTaskGroup - - def _append_child_task_count_to_parent( - child_task_count: int | MappedTaskGroup | TaskMap | MappedOperator | None, - parent_node: BaseOperator | MappedTaskGroup | TaskMap | None, - ): - """ - Append the Child Task Count to the Parent. - - This method should only be used for Mapped Models. - """ - if isinstance(parent_node, TaskGroup): - # Remove the regular task counted in parent_node - task_nodes[parent_node.node_id]["task_count"].append(-1) - # Add the mapped task to the parent_node - task_nodes[parent_node.node_id]["task_count"].append(child_task_count) - - def _fill_task_group_map( - task_node: BaseOperator | MappedTaskGroup | TaskMap | None, - parent_node: BaseOperator | MappedTaskGroup | TaskMap | None, - ) -> None: - """Recursively fill the Task Group Map.""" - if task_node is None: - return - - if isinstance(task_node, MappedOperator): - task_nodes[task_node.node_id] = { - "is_group": False, - "parent_id": parent_node.node_id if parent_node else None, - "task_count": [task_node], - } - # Add the Task Count to the Parent Node because parent node is a Task Group - _append_child_task_count_to_parent(child_task_count=task_node, parent_node=parent_node) - return - - if isinstance(task_node, TaskGroup): - task_count = task_node if _is_task_node_mapped_task_group(task_node) else len(task_node.children) - task_nodes[task_node.node_id] = { - "is_group": True, - "parent_id": parent_node.node_id if parent_node else None, - "task_count": [task_count], - } - for child in get_task_group_children_getter()(task_node): - _fill_task_group_map(task_node=child, parent_node=task_node) - return - - if isinstance(task_node, BaseOperator): - task_nodes[task_node.task_id] = { - "is_group": False, - "parent_id": parent_node.node_id if parent_node else None, - "task_count": task_nodes[parent_node.node_id]["task_count"] - if _is_task_node_mapped_task_group(parent_node) and parent_node - else [1], - } - # No Need to Add the Task Count to the Parent Node, these are already counted in Add the Parent - return - - for node in [child for child in get_task_group_children_getter()(dag.task_group)]: - _fill_task_group_map(task_node=node, parent_node=None) - - return task_nodes - - -def get_child_task_map(parent_task_id: str, task_node_map: dict[str, dict[str, Any]]): - """Get the Child Task Map for the Parent Task ID.""" - return [task_id for task_id, task_map in task_node_map.items() if task_map["parent_id"] == parent_task_id] - - -def _count_tis(node: int | MappedTaskGroup | MappedOperator, 
run_id: str, session: SessionDep) -> int: - if not isinstance(node, (MappedTaskGroup, MappedOperator)): - return node - with contextlib.suppress(NotFullyPopulated, NotMapped): - return DBBaseOperator.get_mapped_ti_count(node, run_id=run_id, session=session) - # If the downstream is not actually mapped, or we don't have information to - # determine the length yet, simply return 1 to represent the stand-in ti. - return 1 - - -def fill_task_instance_summaries( - grouped_task_instances: dict[tuple[str, str], list], - task_instance_summaries_to_fill: dict[str, list], - session: SessionDep, -) -> None: - """ - Fill the Task Instance Summaries for the Grouped Task Instances. - - :param grouped_task_instances: Grouped Task Instances - :param task_instance_summaries_to_fill: Task Instance Summaries to fill - :param session: Session - - :return: None - """ - # Additional logic to calculate the overall states to cascade recursive task states - overall_states: dict[tuple[str, str], str] = { - (task_id, run_id): next( - ( - str(state.value) - for state in state_priority - for ti in tis - if state is not None and ti.state == state - ), - "no_status", - ) - for (task_id, run_id), tis in grouped_task_instances.items() - } - - serdag_cache: dict[UUID, SerializedDAG] = {} - task_group_map_cache: dict[UUID, dict[str, dict[str, Any]]] = {} - - for (task_id, run_id), tis in grouped_task_instances.items(): - if not tis: - continue - - sdm = _get_serdag(tis[0], session) - serdag_cache[sdm.id] = serdag_cache.get(sdm.id) or sdm.dag - dag = serdag_cache[sdm.id] - task_group_map_cache[sdm.id] = task_group_map_cache.get(sdm.id) or get_task_group_map(dag=dag) - task_node_map = task_group_map_cache[sdm.id] - ti_try_number = max([ti.try_number for ti in tis]) - ti_start_date = min([ti.start_date for ti in tis if ti.start_date], default=None) - ti_end_date = max([ti.end_date for ti in tis if ti.end_date], default=None) - ti_queued_dttm = min([ti.queued_dttm for ti in tis if ti.queued_dttm], default=None) - ti_note = min([ti.note for ti in tis if ti.note], default=None) - - # Calculate the child states for the task - # Initialize the child states with 0 - child_states = {"no_status" if state is None else state.name.lower(): 0 for state in state_priority} - # Update Task States for non-grouped tasks - child_states.update( - { - "no_status" if state is None else state.name.lower(): len( - [ti for ti in tis if ti.state == state] - if not task_node_map[task_id]["is_group"] - else [ - ti - for ti in tis - if ti.state == state and ti.task_id in get_child_task_map(task_id, task_node_map) - ] - ) - for state in state_priority - } - ) - - # Update Nested Task Group States by aggregating the child states - child_states.update( - { - overall_states[(task_node_id, run_id)].lower(): child_states.get( - overall_states[(task_node_id, run_id)].lower(), 0 - ) - + 1 - for task_node_id in get_child_task_map(task_id, task_node_map) - if task_node_map[task_node_id]["is_group"] and (task_node_id, run_id) in overall_states - } - ) - - # Get the overall state for the task - overall_ti_state = next( - ( - state - for state in state_priority - for state_name, state_count in child_states.items() - if state_count > 0 and state_name == state - ), - "no_status", - ) - - # Task Count is either integer or a TaskGroup to get the task count - task_instance_summaries_to_fill[run_id].append( - GridTaskInstanceSummary( - task_id=task_id, - try_number=ti_try_number, - start_date=ti_start_date, - end_date=ti_end_date, - queued_dttm=ti_queued_dttm, - 
child_states=child_states, - task_count=sum(_count_tis(n, run_id, session) for n in task_node_map[task_id]["task_count"]), - state=TaskInstanceState[overall_ti_state.upper()] - if overall_ti_state != "no_status" - else None, - note=ti_note, - ) - ) - - -def get_structure_from_dag(dag: DAG) -> StructureDataResponse: - """If we do not have TIs, we just get the structure from the DAG.""" - nodes = [task_group_to_dict(child) for child in get_task_group_children_getter()(dag.task_group)] - return StructureDataResponse(nodes=nodes, edges=[]) - - -def _get_serdag(ti, session): - dag_version = ti.dag_version - if not dag_version: - dag_version = session.scalar( - select(DagVersion) - .where( - DagVersion.dag_id == ti.dag_id, - ) - .order_by(DagVersion.id) # ascending cus this is mostly for pre-3.0 upgrade - .limit(1) - ) - if not dag_version: - raise RuntimeError("No dag_version object could be found.") - if not dag_version.serialized_dag: - log.error( - "No serialized dag found", - dag_id=dag_version.dag_id, - version_id=dag_version.id, - version_number=dag_version.version_number, - ) - return dag_version.serialized_dag - - -def get_combined_structure(task_instances, session): - """Given task instances with varying DAG versions, get a combined structure.""" - merged_nodes = [] - # we dedup with serdag, as serdag.dag varies somehow? - serdags = {_get_serdag(ti, session) for ti in task_instances} - dags = [] - for serdag in serdags: - if serdag: - dags.append(serdag.dag) - for dag in dags: - nodes = [task_group_to_dict(child) for child in get_task_group_children_getter()(dag.task_group)] - _merge_node_dicts(merged_nodes, nodes) - - return StructureDataResponse(nodes=merged_nodes, edges=[]) - - def _merge_node_dicts(current, new) -> None: current_ids = {node["id"] for node in current} for node in new: if node["id"] in current_ids: current_node = _get_node_by_id(current, node["id"]) # if we have children, merge those as well - if "children" in current_node: + if current_node.get("children"): _merge_node_dicts(current_node["children"], node["children"]) else: current.append(node) @@ -312,11 +51,6 @@ def _get_node_by_id(nodes, node_id): return {} -def _is_task_node_mapped_task_group(task_node: BaseOperator | MappedTaskGroup | TaskMap | None) -> bool: - """Check if the Task Node is a Mapped Task Group.""" - return type(task_node) is MappedTaskGroup - - def agg_state(states): states = Counter(states) for state in state_priority: @@ -326,7 +60,7 @@ def agg_state(states): def _get_aggs_for_node(detail): - states = [x["state"] for x in detail if x["state"] is not None] + states = [x["state"] for x in detail] try: min_start_date = min(x["start_date"] for x in detail if x["start_date"]) except ValueError: @@ -344,8 +78,8 @@ def _get_aggs_for_node(detail): def _find_aggregates( - node: TaskGroup | BaseOperator | MappedTaskGroup | TaskMap, - parent_node: TaskGroup | BaseOperator | MappedTaskGroup | TaskMap | None, + node: TaskGroup | MappedTaskGroup | SerializedBaseOperator | TaskMap, + parent_node: TaskGroup | MappedTaskGroup | SerializedBaseOperator | TaskMap | None, ti_details: dict[str, list], ) -> Iterable[dict]: """Recursively fill the Task Group Map.""" @@ -385,7 +119,7 @@ def _find_aggregates( **_get_aggs_for_node(children), } return - if isinstance(node, BaseOperator): + if isinstance(node, SerializedBaseOperator): yield { "task_id": node_id, "type": "task", diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py 
b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py new file mode 100644 index 0000000000000..c75ca8c14f2ee --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/hitl.py @@ -0,0 +1,72 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from datetime import datetime +from typing import Any +from uuid import UUID + +from pydantic import Field + +from airflow.api_fastapi.core_api.base import BaseModel +from airflow.models.hitl import HITLDetail + + +class HITLDetailRequest(BaseModel): + """Schema for the request part of a Human-in-the-loop detail for a specific task instance.""" + + ti_id: UUID + options: list[str] + subject: str + body: str | None = None + defaults: list[str] | None = None + multiple: bool = False + params: dict[str, Any] = Field(default_factory=dict) + + +class GetHITLDetailResponsePayload(BaseModel): + """Schema for getting the response part of a Human-in-the-loop detail for a specific task instance.""" + + ti_id: UUID + + +class UpdateHITLDetailPayload(BaseModel): + """Schema for writing the response part of a Human-in-the-loop detail for a specific task instance.""" + + ti_id: UUID + chosen_options: list[str] + params_input: dict[str, Any] = Field(default_factory=dict) + + +class HITLDetailResponse(BaseModel): + """Schema for the response part of a Human-in-the-loop detail for a specific task instance.""" + + response_received: bool + user_id: str | None + response_at: datetime | None + chosen_options: list[str] | None + params_input: dict[str, Any] = Field(default_factory=dict) + + @classmethod + def from_hitl_detail_orm(cls, hitl_detail: HITLDetail) -> HITLDetailResponse: + return HITLDetailResponse( + response_received=hitl_detail.response_received, + response_at=hitl_detail.response_at, + user_id=hitl_detail.user_id, + chosen_options=hitl_detail.chosen_options, + params_input=hitl_detail.params_input or {}, + ) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py index 5afc2e75a7193..2d6d46aa9ef2c 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/datamodels/taskinstance.py @@ -241,6 +241,7 @@ class TaskInstance(BaseModel): dag_id: str run_id: str try_number: int + dag_version_id: uuid.UUID map_index: int = -1 hostname: str | None = None context_carrier: dict | None = None @@ -300,7 +301,7 @@ class TIRunContext(BaseModel): dag_run: DagRun """DAG run information for the task instance.""" - task_reschedule_count: Annotated[int, Field(default=0)] + task_reschedule_count: int = 0 """How many times the task has been rescheduled.""" max_tries: 
int @@ -326,7 +327,7 @@ class TIRunContext(BaseModel): xcom_keys_to_clear: Annotated[list[str], Field(default_factory=list)] """List of Xcom keys that need to be cleared and purged on by the worker.""" - should_retry: bool + should_retry: bool = False """If the ti encounters an error, whether it should enter retry or failed state.""" diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py index 164c3f0942d1f..ab163f0bac569 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/__init__.py @@ -26,6 +26,7 @@ connections, dag_runs, health, + hitl, task_instances, task_reschedules, variables, @@ -48,5 +49,6 @@ ) authenticated_router.include_router(variables.router, prefix="/variables", tags=["Variables"]) authenticated_router.include_router(xcoms.router, prefix="/xcoms", tags=["XComs"]) +authenticated_router.include_router(hitl.router, prefix="/hitl-details", tags=["Human in the Loop"]) execution_api_router.include_router(authenticated_router) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py new file mode 100644 index 0000000000000..a82e496a8a7a2 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/hitl.py @@ -0,0 +1,108 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from datetime import datetime, timezone +from uuid import UUID + +import structlog +from fastapi import APIRouter, HTTPException, status +from sqlalchemy import select + +from airflow.api_fastapi.common.db.common import SessionDep +from airflow.api_fastapi.execution_api.datamodels.hitl import ( + HITLDetailRequest, + HITLDetailResponse, +) +from airflow.models.hitl import HITLDetail +from airflow.sdk.execution_time.comms import CreateHITLDetailPayload, UpdateHITLDetail + +router = APIRouter() + +log = structlog.get_logger(__name__) + + +@router.post( + "/{task_instance_id}", + status_code=status.HTTP_201_CREATED, +) +def add_hitl_detail( + task_instance_id: UUID, + payload: CreateHITLDetailPayload, + session: SessionDep, +) -> HITLDetailRequest: + """Create a Human-in-the-loop detail for a specific Task Instance.""" + ti_id_str = str(task_instance_id) + hitl_detail_model = session.scalar(select(HITLDetail).where(HITLDetail.ti_id == ti_id_str)) + if hitl_detail_model: + raise HTTPException( + status.HTTP_409_CONFLICT, + f"Human-in-the-loop detail for Task Instance with id {ti_id_str} already exists.", + ) + + hitl_detail = HITLDetail( + ti_id=ti_id_str, + options=payload.options, + subject=payload.subject, + body=payload.body, + defaults=payload.defaults, + multiple=payload.multiple, + params=payload.params, + ) + session.add(hitl_detail) + session.commit() + return HITLDetailRequest.model_validate(hitl_detail) + + +@router.patch("/{task_instance_id}") +def update_hitl_detail( + task_instance_id: UUID, + payload: UpdateHITLDetail, + session: SessionDep, +) -> HITLDetailResponse: + """Update the response part of a Human-in-the-loop detail for a specific Task Instance.""" + ti_id_str = str(task_instance_id) + hitl_detail_model = session.execute(select(HITLDetail).where(HITLDetail.ti_id == ti_id_str)).scalar() + if hitl_detail_model.response_received: + raise HTTPException( + status.HTTP_409_CONFLICT, + f"Human-in-the-loop detail for Task Instance with id {ti_id_str} already has a response.", + ) + + hitl_detail_model.user_id = "Fallback to defaults" + hitl_detail_model.response_at = datetime.now(timezone.utc) + hitl_detail_model.chosen_options = payload.chosen_options + hitl_detail_model.params_input = payload.params_input + session.add(hitl_detail_model) + session.commit() + return HITLDetailResponse.from_hitl_detail_orm(hitl_detail_model) + + +@router.get( + "/{task_instance_id}", + status_code=status.HTTP_200_OK, +) +def get_hitl_detail( + task_instance_id: UUID, + session: SessionDep, +) -> HITLDetailResponse: + """Get Human-in-the-loop detail for a specific Task Instance.""" + ti_id_str = str(task_instance_id) + hitl_detail_model = session.execute( + select(HITLDetail).where(HITLDetail.ti_id == ti_id_str), + ).scalar() + return HITLDetailResponse.from_hitl_detail_orm(hitl_detail_model) diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py b/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py index e22d0a5f34d0d..eef96323d2b7d 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/routes/task_instances.py @@ -23,7 +23,7 @@ from collections import defaultdict from collections.abc import Iterator from datetime import datetime -from typing import TYPE_CHECKING, Annotated, Any +from typing import TYPE_CHECKING, Annotated, Any, cast from uuid import UUID import attrs @@ -73,6 +73,7 @@ if TYPE_CHECKING: from
sqlalchemy.sql.dml import Update + from airflow.models.expandinput import SchedulerExpandInput from airflow.sdk.types import Operator @@ -308,9 +309,9 @@ def _get_upstream_map_indexes( mapped_ti_count = upstream_mapped_group.get_parse_time_mapped_ti_count() except NotFullyPopulated: # for cases that needs to resolve xcom to get the correct count - mapped_ti_count = upstream_mapped_group._expand_input.get_total_map_length( - run_id, session=session - ) + mapped_ti_count = cast( + "SchedulerExpandInput", upstream_mapped_group._expand_input + ).get_total_map_length(run_id, session=session) map_indexes = list(range(mapped_ti_count)) if mapped_ti_count is not None else None yield upstream_task.task_id, map_indexes diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/versions/__init__.py b/airflow-core/src/airflow/api_fastapi/execution_api/versions/__init__.py index 5462f10297495..ccef71f7bfb14 100644 --- a/airflow-core/src/airflow/api_fastapi/execution_api/versions/__init__.py +++ b/airflow-core/src/airflow/api_fastapi/execution_api/versions/__init__.py @@ -21,9 +21,11 @@ from airflow.api_fastapi.execution_api.versions.v2025_04_28 import AddRenderedMapIndexField from airflow.api_fastapi.execution_api.versions.v2025_05_20 import DowngradeUpstreamMapIndexes +from airflow.api_fastapi.execution_api.versions.v2025_08_10 import AddDagVersionIdField bundle = VersionBundle( HeadVersion(), + Version("2025-08-10", AddDagVersionIdField), Version("2025-05-20", DowngradeUpstreamMapIndexes), Version("2025-04-28", AddRenderedMapIndexField), Version("2025-04-11"), diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_08_10.py b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_08_10.py new file mode 100644 index 0000000000000..dcea9a6a0e857 --- /dev/null +++ b/airflow-core/src/airflow/api_fastapi/execution_api/versions/v2025_08_10.py @@ -0,0 +1,30 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
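The three execution-API routes above give Human-in-the-loop its lifecycle: POST registers the pending request, PATCH records the response exactly once, and GET lets the waiting task poll for it. A hedged round-trip sketch; the base path, client object, and UUID are placeholders, and the JSON bodies simply mirror the fields the routes read from `CreateHITLDetailPayload` and `UpdateHITLDetail` (the real comms models may carry extra envelope fields):

```python
import httpx

ti_id = "01890a5d-ac68-7f4e-8f0e-0123456789ab"  # placeholder task instance UUID
client = httpx.Client(base_url="http://localhost:8080/execution")  # placeholder base path

# 1. The task creates the pending request (201; a repeat POST gets 409).
client.post(
    f"/hitl-details/{ti_id}",
    json={"options": ["approve", "reject"], "subject": "Deploy to prod?",
          "body": None, "defaults": ["reject"], "multiple": False, "params": {}},
)

# 2. A responder (or the timeout fallback path) records the choice; a second
#    PATCH is rejected because response_received is already set.
client.patch(f"/hitl-details/{ti_id}", json={"chosen_options": ["approve"], "params_input": {}})

# 3. The waiting task polls until the response shows up.
detail = client.get(f"/hitl-details/{ti_id}").json()
if detail["response_received"]:
    print(detail["chosen_options"], detail["response_at"])
```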
+ +from __future__ import annotations + +from cadwyn import VersionChange, schema + +from airflow.api_fastapi.execution_api.datamodels.taskinstance import TaskInstance + + +class AddDagVersionIdField(VersionChange): + """Add the `dag_version_id` field to the TaskInstance model.""" + + description = __doc__ + + instructions_to_migrate_to_previous_version = (schema(TaskInstance).field("dag_version_id").didnt_exist,) diff --git a/airflow-core/src/airflow/callbacks/callback_requests.py b/airflow-core/src/airflow/callbacks/callback_requests.py index 6c4c978b672de..4d2ed18b36dd5 100644 --- a/airflow-core/src/airflow/callbacks/callback_requests.py +++ b/airflow-core/src/airflow/callbacks/callback_requests.py @@ -61,6 +61,8 @@ class TaskCallbackRequest(BaseCallbackRequest): """Simplified Task Instance representation""" task_callback_type: TaskInstanceState | None = None """Whether on success, on failure, on retry""" + context_from_server: ti_datamodel.TIRunContext | None = None + """Task execution context from the Server""" type: Literal["TaskCallbackRequest"] = "TaskCallbackRequest" @property diff --git a/airflow-core/src/airflow/callbacks/pipe_callback_sink.py b/airflow-core/src/airflow/callbacks/pipe_callback_sink.py deleted file mode 100644 index f7aba11960c18..0000000000000 --- a/airflow-core/src/airflow/callbacks/pipe_callback_sink.py +++ /dev/null @@ -1,51 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from __future__ import annotations - -import contextlib -from collections.abc import Callable -from typing import TYPE_CHECKING - -from airflow.callbacks.base_callback_sink import BaseCallbackSink - -if TYPE_CHECKING: - from multiprocessing.connection import Connection as MultiprocessingConnection - - from airflow.callbacks.callback_requests import CallbackRequest - - -class PipeCallbackSink(BaseCallbackSink): - """ - Class for sending callbacks to DagProcessor using pipe. - - It is used when DagProcessor is not executed in standalone mode. - """ - - def __init__(self, get_sink_pipe: Callable[[], MultiprocessingConnection]): - self._get_sink_pipe = get_sink_pipe - - def send(self, callback: CallbackRequest): - """ - Send information about the callback to be executed by Pipe. - - :param callback: Callback request to be executed. - """ - with contextlib.suppress(ConnectionError): - # If this died cos of an error then we will noticed and restarted - # when harvest_serialized_dags calls _heartbeat_manager. 
- self._get_sink_pipe().send(callback) diff --git a/airflow-core/src/airflow/cli/cli_config.py b/airflow-core/src/airflow/cli/cli_config.py index 31022e17219c4..363839a85808c 100644 --- a/airflow-core/src/airflow/cli/cli_config.py +++ b/airflow-core/src/airflow/cli/cli_config.py @@ -633,10 +633,10 @@ def string_lower_type(val): default=conf.get("api", "host"), help="Set the host on which to run the API server", ) -ARG_API_SERVER_ACCESS_LOGFILE = Arg( - ("-A", "--access-logfile"), - default=conf.get("api", "access_logfile"), - help="The logfile to store the access log. Use '-' to print to stdout", +ARG_API_SERVER_LOG_CONFIG = Arg( + ("--log-config",), + default=conf.get("api", "log_config", fallback=None), + help="(Optional) Path to the logging configuration file for the uvicorn server. If not set, the default uvicorn logging configuration will be used.", ) ARG_API_SERVER_APPS = Arg( ("--apps",), @@ -1864,7 +1864,7 @@ class GroupCommand(NamedTuple): ARG_DAEMON, ARG_STDOUT, ARG_STDERR, - ARG_API_SERVER_ACCESS_LOGFILE, + ARG_API_SERVER_LOG_CONFIG, ARG_API_SERVER_APPS, ARG_LOG_FILE, ARG_SSL_CERT, diff --git a/airflow-core/src/airflow/cli/commands/api_server_command.py b/airflow-core/src/airflow/cli/commands/api_server_command.py index 343890aa8e4e8..5a8e1fb86411c 100644 --- a/airflow-core/src/airflow/cli/commands/api_server_command.py +++ b/airflow-core/src/airflow/cli/commands/api_server_command.py @@ -21,13 +21,13 @@ import logging import os import subprocess +import sys import textwrap import uvicorn -from gunicorn.util import daemonize -from setproctitle import setproctitle from airflow import settings +from airflow.cli.commands.daemon_utils import run_command_with_daemon_option from airflow.exceptions import AirflowConfigException from airflow.utils import cli as cli_utils from airflow.utils.providers_configuration_loader import providers_configuration_loaded @@ -40,6 +40,55 @@ # more info here: https://github.com/benoitc/gunicorn/issues/1877#issuecomment-1911136399 +def _run_api_server(args, apps: str, num_workers: int, worker_timeout: int, proxy_headers: bool): + """Run the API server.""" + log.info( + textwrap.dedent( + f"""\ + Running the uvicorn with: + Apps: {apps} + Workers: {num_workers} + Host: {args.host}:{args.port} + Timeout: {worker_timeout} + Logfiles: {args.log_file or "-"} + =================================================================""" + ) + ) + # get ssl cert and key filepaths here instead of passing them as arguments to reduce the number of arguments + ssl_cert, ssl_key = _get_ssl_cert_and_key_filepaths(args) + + # setproctitle causes issue on Mac OS: https://github.com/benoitc/gunicorn/issues/3021 + os_type = sys.platform + if os_type == "darwin": + log.debug("Mac OS detected, skipping setproctitle") + else: + from setproctitle import setproctitle + + setproctitle(f"airflow api_server -- host:{args.host} port:{args.port}") + + uvicorn_kwargs = { + "host": args.host, + "port": args.port, + "workers": num_workers, + "timeout_keep_alive": worker_timeout, + "timeout_graceful_shutdown": worker_timeout, + "ssl_keyfile": ssl_key, + "ssl_certfile": ssl_cert, + "access_log": True, + "proxy_headers": proxy_headers, + } + # Only set the log_config if it is provided, otherwise use the default uvicorn logging configuration. + if args.log_config and args.log_config != "-": + # The [api/log_config] is migrated from [api/access_logfile] and [api/access_logfile] defaults to "-" for stdout for Gunicorn. 
+ # So we need to check if the log_config is set to "-" or not; if it is set to "-", we regard it as not set. + uvicorn_kwargs["log_config"] = args.log_config + + uvicorn.run( + "airflow.api_fastapi.main:app", + **uvicorn_kwargs, + ) + + @cli_utils.action_cli @providers_configuration_loaded def api_server(args): @@ -47,7 +96,6 @@ def api_server(args): print(settings.HEADER) apps = args.apps - access_logfile = args.access_logfile or "-" num_workers = args.workers worker_timeout = args.worker_timeout proxy_headers = args.proxy_headers @@ -74,6 +122,9 @@ def api_server(args): if args.proxy_headers: run_args.append("--proxy-headers") + if args.log_config and args.log_config != "-": + run_args.extend(["--log-config", args.log_config]) + # There is no way to pass the apps to airflow/api_fastapi/main.py in the development mode # because fastapi dev command does not accept any additional arguments # so environment variable is being used to pass it @@ -85,35 +136,16 @@ def api_server(args): process.wait() os.environ.pop("AIRFLOW_API_APPS") else: - if args.daemon: - daemonize() - log.info("Daemonized the API server process PID: %s", os.getpid()) - - log.info( - textwrap.dedent( - f"""\ - Running the uvicorn with: - Apps: {apps} - Workers: {num_workers} - Host: {args.host}:{args.port} - Timeout: {worker_timeout} - Logfiles: {access_logfile} - =================================================================""" - ) - ) - ssl_cert, ssl_key = _get_ssl_cert_and_key_filepaths(args) - setproctitle(f"airflow api_server -- host:{args.host} port:{args.port}") - uvicorn.run( - "airflow.api_fastapi.main:app", - host=args.host, - port=args.port, - workers=num_workers, - timeout_keep_alive=worker_timeout, - timeout_graceful_shutdown=worker_timeout, - ssl_keyfile=ssl_key, - ssl_certfile=ssl_cert, - access_log=access_logfile, - proxy_headers=proxy_headers, + run_command_with_daemon_option( + args=args, + process_name="api_server", + callback=lambda: _run_api_server( + args=args, + apps=apps, + num_workers=num_workers, + worker_timeout=worker_timeout, + proxy_headers=proxy_headers, + ), ) diff --git a/airflow-core/src/airflow/cli/commands/task_command.py b/airflow-core/src/airflow/cli/commands/task_command.py index 9eca6aeeae247..61a8f81c86d72 100644 --- a/airflow-core/src/airflow/cli/commands/task_command.py +++ b/airflow-core/src/airflow/cli/commands/task_command.py @@ -33,6 +33,7 @@ from airflow.exceptions import AirflowConfigException, DagRunNotFound, TaskInstanceNotFound from airflow.models import TaskInstance from airflow.models.dag import DAG as SchedulerDAG, _get_or_create_dagrun +from airflow.models.dag_version import DagVersion from airflow.models.dagrun import DagRun from airflow.sdk.definitions.dag import DAG, _run_task from airflow.sdk.definitions.param import ParamsDict @@ -59,7 +60,7 @@ from sqlalchemy.orm.session import Session - from airflow.models.operator import Operator + from airflow.sdk.types import Operator CreateIfNecessary = Literal[False, "db", "memory"] @@ -200,7 +201,13 @@ def _get_ti( f"run_id or logical_date of {logical_date_or_run_id!r} not found" ) # TODO: Validate map_index is in range? - ti = TaskInstance(task, run_id=dag_run.run_id, map_index=map_index) + dag_version = DagVersion.get_latest_version(dag.dag_id, session=session) + if not dag_version: + # TODO: Remove this once DagVersion.get_latest_version is guaranteed to return a DagVersion/raise + raise ValueError( + f"Cannot create TaskInstance for {dag.dag_id} because the Dag is not serialized." 
+ ) + ti = TaskInstance(task, run_id=dag_run.run_id, map_index=map_index, dag_version_id=dag_version.id) if dag_run in session: session.add(ti) ti.dag_run = dag_run diff --git a/airflow-core/src/airflow/cli/simple_table.py b/airflow-core/src/airflow/cli/simple_table.py index 53125e3f74830..4e321c1c47f7d 100644 --- a/airflow-core/src/airflow/cli/simple_table.py +++ b/airflow-core/src/airflow/cli/simple_table.py @@ -32,7 +32,7 @@ from airflow.utils.platform import is_tty if TYPE_CHECKING: - from airflow.typing_compat import TypeGuard + from typing import TypeGuard def is_data_sequence(data: Sequence[dict | Any]) -> TypeGuard[Sequence[dict]]: diff --git a/airflow-core/src/airflow/config_templates/config.yml b/airflow-core/src/airflow/config_templates/config.yml index e0354cc6fd04b..fa433c427c7ee 100644 --- a/airflow-core/src/airflow/config_templates/config.yml +++ b/airflow-core/src/airflow/config_templates/config.yml @@ -1371,13 +1371,14 @@ api: type: integer example: ~ default: "120" - access_logfile: + log_config: description: | - Log files for the api server. '-' means log to stderr. + Path to the logging configuration file for the uvicorn server. + If not set, the default uvicorn logging configuration will be used. version_added: ~ type: string - example: ~ - default: "-" + example: path/to/logging_config.yaml + default: ~ ssl_cert: description: | Paths to the SSL certificate and key for the api server. When both are diff --git a/airflow-core/src/airflow/configuration.py b/airflow-core/src/airflow/configuration.py index d1bcb8015a0f9..2fbaa798fd7df 100644 --- a/airflow-core/src/airflow/configuration.py +++ b/airflow-core/src/airflow/configuration.py @@ -373,6 +373,7 @@ def sensitive_config_values(self) -> set[tuple[str, str]]: ("api", "require_confirmation_dag_change"): ("webserver", "require_confirmation_dag_change", "3.1.0"), ("api", "instance_name"): ("webserver", "instance_name", "3.1.0"), ("dag_processor", "parsing_pre_import_modules"): ("scheduler", "parsing_pre_import_modules", "3.1.0"), + ("api", "log_config"): ("api", "access_logfile", "3.1.0"), } # A mapping of new section -> (old section, since_version). diff --git a/airflow-core/src/airflow/dag_processing/processor.py b/airflow-core/src/airflow/dag_processing/processor.py index bfc889542fffb..80ab38b041069 100644 --- a/airflow-core/src/airflow/dag_processing/processor.py +++ b/airflow-core/src/airflow/dag_processing/processor.py @@ -16,11 +16,12 @@ # under the License. from __future__ import annotations +import contextlib import importlib import os import sys import traceback -from collections.abc import Callable +from collections.abc import Callable, Sequence from pathlib import Path from typing import TYPE_CHECKING, Annotated, BinaryIO, ClassVar, Literal @@ -45,9 +46,11 @@ VariableResult, ) from airflow.sdk.execution_time.supervisor import WatchedSubprocess +from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance from airflow.serialization.serialized_objects import LazyDeserializedDAG, SerializedDAG from airflow.stats import Stats from airflow.utils.file import iter_airflow_imports +from airflow.utils.state import TaskInstanceState if TYPE_CHECKING: from structlog.typing import FilteringBoundLogger @@ -201,10 +204,7 @@ def _execute_callbacks( for request in callback_requests: log.debug("Processing Callback Request", request=request.to_json()) if isinstance(request, TaskCallbackRequest): - raise NotImplementedError( - "Haven't coded Task callback yet - https://github.com/apache/airflow/issues/44354!" 
- ) - # _execute_task_callbacks(dagbag, request) + _execute_task_callbacks(dagbag, request, log) if isinstance(request, DagCallbackRequest): _execute_dag_callbacks(dagbag, request, log) @@ -238,6 +238,67 @@ def _execute_dag_callbacks(dagbag: DagBag, request: DagCallbackRequest, log: Fil Stats.incr("dag.callback_exceptions", tags={"dag_id": request.dag_id}) +def _execute_task_callbacks(dagbag: DagBag, request: TaskCallbackRequest, log: FilteringBoundLogger) -> None: + if not request.is_failure_callback: + log.warning( + "Task callback requested but is not a failure callback", + dag_id=request.ti.dag_id, + task_id=request.ti.task_id, + run_id=request.ti.run_id, + ) + return + + dag = dagbag.dags[request.ti.dag_id] + task = dag.get_task(request.ti.task_id) + + if request.task_callback_type is TaskInstanceState.UP_FOR_RETRY: + callbacks = task.on_retry_callback + else: + callbacks = task.on_failure_callback + + if not callbacks: + log.warning( + "Callback requested but no callback found", + dag_id=request.ti.dag_id, + task_id=request.ti.task_id, + run_id=request.ti.run_id, + ti_id=request.ti.id, + ) + return + + callbacks = callbacks if isinstance(callbacks, Sequence) else [callbacks] + ctx_from_server = request.context_from_server + + if ctx_from_server is not None: + runtime_ti = RuntimeTaskInstance.model_construct( + **request.ti.model_dump(exclude_unset=True), + task=task, + _ti_context_from_server=ctx_from_server, + max_tries=ctx_from_server.max_tries, + ) + else: + runtime_ti = RuntimeTaskInstance.model_construct( + **request.ti.model_dump(exclude_unset=True), + task=task, + ) + context = runtime_ti.get_template_context() + + def get_callback_representation(callback): + with contextlib.suppress(AttributeError): + return callback.__name__ + with contextlib.suppress(AttributeError): + return callback.__class__.__name__ + return callback + + for idx, callback in enumerate(callbacks): + callback_repr = get_callback_representation(callback) + log.info("Executing Task callback at index %d: %s", idx, callback_repr) + try: + callback(context) + except Exception: + log.exception("Error in callback at index %d: %s", idx, callback_repr) + + def in_process_api_server() -> InProcessExecutionAPI: from airflow.api_fastapi.execution_api.app import InProcessExecutionAPI diff --git a/airflow-core/src/airflow/example_dags/example_dag_decorator.py b/airflow-core/src/airflow/example_dags/example_dag_decorator.py index 5d1312a888e80..9216805a7f3f8 100644 --- a/airflow-core/src/airflow/example_dags/example_dag_decorator.py +++ b/airflow-core/src/airflow/example_dags/example_dag_decorator.py @@ -23,9 +23,8 @@ import httpx import pendulum -from airflow.models.baseoperator import BaseOperator from airflow.providers.standard.operators.bash import BashOperator -from airflow.sdk import dag, task +from airflow.sdk import BaseOperator, dag, task if TYPE_CHECKING: from airflow.sdk import Context diff --git a/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py b/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py index c762eee74f96e..07cd653d29b14 100644 --- a/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py +++ b/airflow-core/src/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py @@ -21,8 +21,7 @@ from datetime import datetime -from airflow.models.baseoperator import BaseOperator -from airflow.sdk import DAG +from airflow.sdk import DAG, BaseOperator 
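These example-DAG hunks repeat the same mechanical migration: `BaseOperator` is now imported from the public `airflow.sdk` namespace rather than the internal `airflow.models.baseoperator` module (which this diff deletes further down). A minimal sketch of a DAG written against the new import surface — the operator class and dag_id below are illustrative, not taken from this diff:

from __future__ import annotations

from airflow.sdk import DAG, BaseOperator


class HelloOperator(BaseOperator):
    """Illustrative subclass; the import path is the point of this sketch."""

    def execute(self, context):
        # self.log is supplied by BaseOperator's logging mixin.
        self.log.info("Hello from task %s", self.task_id)


with DAG(dag_id="sdk_import_sketch") as dag:
    HelloOperator(task_id="hello")

Operator behavior is unchanged; the example DAGs simply stop depending on scheduler-internal modules.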
class AddOneOperator(BaseOperator): diff --git a/airflow-core/src/airflow/example_dags/example_skip_dag.py b/airflow-core/src/airflow/example_dags/example_skip_dag.py index 7575494d0d9ab..8226a6e63bd37 100644 --- a/airflow-core/src/airflow/example_dags/example_skip_dag.py +++ b/airflow-core/src/airflow/example_dags/example_skip_dag.py @@ -25,9 +25,8 @@ import pendulum from airflow.exceptions import AirflowSkipException -from airflow.models.baseoperator import BaseOperator from airflow.providers.standard.operators.empty import EmptyOperator -from airflow.sdk import DAG +from airflow.sdk import DAG, BaseOperator from airflow.utils.trigger_rule import TriggerRule if TYPE_CHECKING: diff --git a/airflow-core/src/airflow/exceptions.py b/airflow-core/src/airflow/exceptions.py index 045f9647ade76..6fee92b0af32a 100644 --- a/airflow-core/src/airflow/exceptions.py +++ b/airflow-core/src/airflow/exceptions.py @@ -21,7 +21,6 @@ from __future__ import annotations -import warnings from collections.abc import Collection, Sequence from datetime import datetime, timedelta from http import HTTPStatus @@ -30,8 +29,6 @@ from airflow.utils.trigger_rule import TriggerRule if TYPE_CHECKING: - from collections.abc import Sized - from airflow.models import DagRun from airflow.sdk.definitions.asset import AssetNameRef, AssetUniqueKey, AssetUriRef from airflow.utils.state import DagRunState @@ -102,10 +99,6 @@ class AirflowTaskTerminated(BaseException): """Raise when the task execution is terminated.""" -class AirflowWebServerTimeout(AirflowException): - """Raise when the web server times out.""" - - class AirflowSkipException(AirflowException): """Raise when the task should be skipped.""" @@ -181,38 +174,6 @@ def serialize(self): ) -class XComForMappingNotPushed(AirflowException): - """Raise when a mapped downstream's dependency fails to push XCom for task mapping.""" - - def __str__(self) -> str: - return "did not push XCom for task mapping" - - -class UnmappableXComTypePushed(AirflowException): - """Raise when an unmappable type is pushed as a mapped downstream's dependency.""" - - def __init__(self, value: Any, *values: Any) -> None: - super().__init__(value, *values) - - def __str__(self) -> str: - typename = type(self.args[0]).__qualname__ - for arg in self.args[1:]: - typename = f"{typename}[{type(arg).__qualname__}]" - return f"unmappable return type {typename!r}" - - -class UnmappableXComLengthPushed(AirflowException): - """Raise when the pushed value is too large to map as a downstream's dependency.""" - - def __init__(self, value: Sized, max_length: int) -> None: - super().__init__(value) - self.value = value - self.max_length = max_length - - def __str__(self) -> str: - return f"unmappable return value length: {len(self.value)} > {self.max_length}" - - class AirflowDagCycleException(AirflowException): """Raise when there is a cycle in DAG definition.""" @@ -284,14 +245,6 @@ def serialize(self): ) -class DagFileExists(AirflowBadRequest): - """Raise when a DAG ID is still in DagBag i.e., DAG file is in DAG folder.""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - warnings.warn("DagFileExists is deprecated and will be removed.", DeprecationWarning, stacklevel=2) - - class FailFastDagInvalidTriggerRule(AirflowException): """Raise when a dag has 'fail_fast' enabled yet has a non-default trigger rule.""" diff --git a/airflow-core/src/airflow/executors/workloads.py b/airflow-core/src/airflow/executors/workloads.py index bca3a777b3655..43a4aab1dbc47 100644 --- 
a/airflow-core/src/airflow/executors/workloads.py +++ b/airflow-core/src/airflow/executors/workloads.py @@ -55,7 +55,7 @@ class TaskInstance(BaseModel): """Schema for TaskInstance with minimal required fields needed for Executors and Task SDK.""" id: uuid.UUID - + dag_version_id: uuid.UUID task_id: str dag_id: str run_id: str diff --git a/airflow-core/src/airflow/io/__init__.py b/airflow-core/src/airflow/io/__init__.py index 6bbea93e59ba3..3b255aacdf82a 100644 --- a/airflow-core/src/airflow/io/__init__.py +++ b/airflow-core/src/airflow/io/__init__.py @@ -16,102 +16,26 @@ # under the License. from __future__ import annotations -import inspect -import logging -from collections.abc import Callable, Mapping -from functools import cache -from typing import ( - TYPE_CHECKING, +from airflow.utils.deprecation_tools import add_deprecated_classes + +add_deprecated_classes( + { + __name__: { + "get_fs": "airflow.sdk.io.get_fs", + "has_fs": "airflow.sdk.io.has_fs", + "attach": "airflow.sdk.io.attach", + "Properties": "airflow.sdk.io.Properties", + "_BUILTIN_SCHEME_TO_FS": "airflow.sdk.io.fs._BUILTIN_SCHEME_TO_FS", + }, + "path": { + "ObjectStoragePath": "airflow.sdk.ObjectStoragePath", + }, + "storage": { + "attach": "airflow.sdk.io.attach", + }, + "typedef": { + "Properties": "airflow.sdk.io.typedef.Properties", + }, + }, + package=__name__, ) - -from fsspec.implementations.local import LocalFileSystem - -from airflow.providers_manager import ProvidersManager -from airflow.stats import Stats -from airflow.utils.module_loading import import_string - -if TYPE_CHECKING: - from fsspec import AbstractFileSystem - - from airflow.io.typedef import Properties - - -log = logging.getLogger(__name__) - - -def _file(_: str | None, storage_options: Properties) -> LocalFileSystem: - return LocalFileSystem(**storage_options) - - -# builtin supported filesystems -_BUILTIN_SCHEME_TO_FS: dict[str, Callable[[str | None, Properties], AbstractFileSystem]] = { - "file": _file, - "local": _file, -} - - -@cache -def _register_filesystems() -> Mapping[ - str, - Callable[[str | None, Properties], AbstractFileSystem] | Callable[[str | None], AbstractFileSystem], -]: - scheme_to_fs = _BUILTIN_SCHEME_TO_FS.copy() - with Stats.timer("airflow.io.load_filesystems") as timer: - manager = ProvidersManager() - for fs_module_name in manager.filesystem_module_names: - fs_module = import_string(fs_module_name) - for scheme in getattr(fs_module, "schemes", []): - if scheme in scheme_to_fs: - log.warning("Overriding scheme %s for %s", scheme, fs_module_name) - - method = getattr(fs_module, "get_fs", None) - if method is None: - raise ImportError(f"Filesystem {fs_module_name} does not have a get_fs method") - scheme_to_fs[scheme] = method - - log.debug("loading filesystems from providers took %.3f seconds", timer.duration) - return scheme_to_fs - - -def get_fs( - scheme: str, conn_id: str | None = None, storage_options: Properties | None = None -) -> AbstractFileSystem: - """ - Get a filesystem by scheme. 
- - :param scheme: the scheme to get the filesystem for - :return: the filesystem method - :param conn_id: the airflow connection id to use - :param storage_options: the storage options to pass to the filesystem - """ - filesystems = _register_filesystems() - try: - fs = filesystems[scheme] - except KeyError: - raise ValueError(f"No filesystem registered for scheme {scheme}") from None - - options = storage_options or {} - - # MyPy does not recognize dynamic parameters inspection when we call the method, and we have to do - # it for compatibility reasons with already released providers, that's why we need to ignore - # mypy errors here - parameters = inspect.signature(fs).parameters - if len(parameters) == 1: - if options: - raise AttributeError( - f"Filesystem {scheme} does not support storage options, but options were passed." - f"This most likely means that you are using an old version of the provider that does not " - f"support storage options. Please upgrade the provider if possible." - ) - return fs(conn_id) # type: ignore[call-arg] - return fs(conn_id, options) # type: ignore[call-arg] - - -def has_fs(scheme: str) -> bool: - """ - Check if a filesystem is available for a scheme. - - :param scheme: the scheme to check - :return: True if a filesystem is available for the scheme - """ - return scheme in _register_filesystems() diff --git a/airflow-core/src/airflow/jobs/scheduler_job_runner.py b/airflow-core/src/airflow/jobs/scheduler_job_runner.py index 6c23c6a88096f..da269af423a66 100644 --- a/airflow-core/src/airflow/jobs/scheduler_job_runner.py +++ b/airflow-core/src/airflow/jobs/scheduler_job_runner.py @@ -38,6 +38,7 @@ from sqlalchemy.sql import expression from airflow import settings +from airflow.api_fastapi.execution_api.datamodels.taskinstance import TIRunContext from airflow.callbacks.callback_requests import DagCallbackRequest, TaskCallbackRequest from airflow.configuration import conf from airflow.dag_processing.bundles.base import BundleUsageTrackingManager @@ -927,10 +928,16 @@ def process_executor_events( bundle_version=ti.dag_version.bundle_version, ti=ti, msg=msg, + context_from_server=TIRunContext( + dag_run=ti.dag_run, + max_tries=ti.max_tries, + variables=[], + connections=[], + xcom_keys_to_clear=[], + ), ) executor.send_callback(request) - else: - ti.handle_failure(error=msg, session=session) + ti.handle_failure(error=msg, session=session) return len(event_buffer) @@ -2283,6 +2290,13 @@ def _purge_task_instances_without_heartbeats( bundle_version=ti.dag_run.bundle_version, ti=ti, msg=str(task_instance_heartbeat_timeout_message_details), + context_from_server=TIRunContext( + dag_run=ti.dag_run, + max_tries=ti.max_tries, + variables=[], + connections=[], + xcom_keys_to_clear=[], + ), ) session.add( Log( diff --git a/airflow-core/src/airflow/jobs/triggerer_job_runner.py b/airflow-core/src/airflow/jobs/triggerer_job_runner.py index fd02baecb7176..07966655cfe99 100644 --- a/airflow-core/src/airflow/jobs/triggerer_job_runner.py +++ b/airflow-core/src/airflow/jobs/triggerer_job_runner.py @@ -43,6 +43,7 @@ from airflow.jobs.base_job_runner import BaseJobRunner from airflow.jobs.job import perform_heartbeat from airflow.models.trigger import Trigger +from airflow.sdk.api.datamodels._generated import HITLDetailResponse from airflow.sdk.execution_time.comms import ( CommsDecoder, ConnectionResult, @@ -52,12 +53,14 @@ GetConnection, GetDagRunState, GetDRCount, + GetHITLDetailResponse, GetTaskStates, GetTICount, GetVariable, GetXCom, TaskStatesResult, TICount, + 
UpdateHITLDetail, VariableResult, XComResult, _RequestFrame, @@ -209,6 +212,23 @@ class TriggerStateSync(BaseModel): to_cancel: set[int] +class HITLDetailResponseResult(HITLDetailResponse): + """Response to GetHITLDetailResponse request.""" + + type: Literal["HITLDetailResponseResult"] = "HITLDetailResponseResult" + + @classmethod + def from_api_response(cls, response: HITLDetailResponse) -> HITLDetailResponseResult: + """ + Create result class from API Response. + + API Response is autogenerated from the API schema, so we need to convert it to Result + for communication between the Supervisor and the task process since it needs a + discriminator field. + """ + return cls(**response.model_dump(exclude_defaults=True), type="HITLDetailResponseResult") + + ToTriggerRunner = Annotated[ messages.StartTriggerer | messages.TriggerStateSync @@ -219,6 +239,7 @@ class TriggerStateSync(BaseModel): | DRCount | TICount | TaskStatesResult + | HITLDetailResponseResult | ErrorResponse, Field(discriminator="type"), ] @@ -236,7 +257,9 @@ class TriggerStateSync(BaseModel): | GetTICount | GetTaskStates | GetDagRunState - | GetDRCount, + | GetDRCount + | GetHITLDetailResponse + | UpdateHITLDetail, Field(discriminator="type"), ] """ @@ -448,6 +471,16 @@ def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger, r resp = TaskStatesResult.from_api_response(run_id_task_state_map) else: resp = run_id_task_state_map + elif isinstance(msg, UpdateHITLDetail): + api_resp = self.client.hitl.update_response( + ti_id=msg.ti_id, + chosen_options=msg.chosen_options, + params_input=msg.params_input, + ) + resp = HITLDetailResponseResult.from_api_response(response=api_resp) + elif isinstance(msg, GetHITLDetailResponse): + api_resp = self.client.hitl.get_detail_response(ti_id=msg.ti_id) + resp = HITLDetailResponseResult.from_api_response(response=api_resp) else: raise ValueError(f"Unknown message type {type(msg)}") diff --git a/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_dag_favorite_table.py b/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_dag_favorite_table.py new file mode 100644 index 0000000000000..657f2741e4ab8 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0075_3_1_0_add_dag_favorite_table.py @@ -0,0 +1,57 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add dag_favorite table. 
+ +Revision ID: ffdb0566c7c0 +Revises: 66a7743fe20e +Create Date: 2025-06-05 15:06:08.903908 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +from airflow.models.base import COLLATION_ARGS + +revision = "ffdb0566c7c0" +down_revision = "66a7743fe20e" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Apply add dag_favorite table.""" + op.create_table( + "dag_favorite", + sa.Column("user_id", sa.String(length=250), nullable=False), + sa.Column("dag_id", sa.String(length=250, **COLLATION_ARGS), nullable=False), + sa.ForeignKeyConstraint( + ["dag_id"], ["dag.dag_id"], name=op.f("dag_favorite_dag_id_fkey"), ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("user_id", "dag_id", name=op.f("dag_favorite_pkey")), + ) + + +def downgrade(): + """Unapply add dag_favorite table.""" + op.drop_table("dag_favorite") diff --git a/airflow-core/src/airflow/migrations/versions/0076_3_1_0_make_dag_version_id_non_nullable_in_.py b/airflow-core/src/airflow/migrations/versions/0076_3_1_0_make_dag_version_id_non_nullable_in_.py new file mode 100644 index 0000000000000..cbd183f7b8f43 --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0076_3_1_0_make_dag_version_id_non_nullable_in_.py @@ -0,0 +1,81 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Make dag_version_id non-nullable in TaskInstance. + +Revision ID: 5d3072c51bac +Revises: ffdb0566c7c0 +Create Date: 2025-05-20 10:38:25.635779 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op +from sqlalchemy_utils import UUIDType + +# revision identifiers, used by Alembic. 
+revision = "5d3072c51bac" +down_revision = "ffdb0566c7c0" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Apply make dag_version_id non-nullable in TaskInstance.""" + conn = op.get_bind() + if conn.dialect.name == "postgresql": + update_query = sa.text(""" + UPDATE task_instance + SET dag_version_id = latest_versions.id + FROM ( + SELECT DISTINCT ON (dag_id) dag_id, id + FROM dag_version + ORDER BY dag_id, created_at DESC + ) latest_versions + WHERE task_instance.dag_id = latest_versions.dag_id + AND task_instance.dag_version_id IS NULL + """) + else: + update_query = sa.text(""" + UPDATE task_instance + SET dag_version_id = ( + SELECT id FROM ( + SELECT id, dag_id, + ROW_NUMBER() OVER (PARTITION BY dag_id ORDER BY created_at DESC) as rn + FROM dag_version + ) ranked_versions + WHERE ranked_versions.dag_id = task_instance.dag_id + AND ranked_versions.rn = 1 + ) + WHERE task_instance.dag_version_id IS NULL + """) + + op.execute(update_query) + + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.alter_column("dag_version_id", existing_type=UUIDType(binary=False), nullable=False) + + +def downgrade(): + """Unapply make dag_version_id non-nullable in TaskInstance.""" + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.alter_column("dag_version_id", existing_type=UUIDType(binary=False), nullable=True) diff --git a/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py new file mode 100644 index 0000000000000..61f950f5d120e --- /dev/null +++ b/airflow-core/src/airflow/migrations/versions/0077_3_1_0_add_human_in_the_loop_response.py @@ -0,0 +1,78 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add Human In the Loop Detail table. + +Revision ID: 40f7c30a228b +Revises: 5d3072c51bac +Create Date: 2025-07-04 15:05:19.459197 + +""" + +from __future__ import annotations + +import sqlalchemy_jsonfield +from alembic import op +from sqlalchemy import Boolean, Column, ForeignKeyConstraint, String, Text +from sqlalchemy.dialects import postgresql + +from airflow.settings import json +from airflow.utils.sqlalchemy import UtcDateTime + +# revision identifiers, used by Alembic. 
+revision = "40f7c30a228b" +down_revision = "5d3072c51bac" +branch_labels = None +depends_on = None +airflow_version = "3.1.0" + + +def upgrade(): + """Add Human In the Loop Detail table.""" + op.create_table( + "hitl_detail", + Column( + "ti_id", + String(length=36).with_variant(postgresql.UUID(), "postgresql"), + primary_key=True, + nullable=False, + ), + Column("options", sqlalchemy_jsonfield.JSONField(json=json), nullable=False), + Column("subject", Text, nullable=False), + Column("body", Text, nullable=True), + Column("defaults", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + Column("multiple", Boolean, unique=False, default=False), + Column("params", sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}), + Column("response_at", UtcDateTime, nullable=True), + Column("user_id", String(128), nullable=True), + Column("chosen_options", sqlalchemy_jsonfield.JSONField(json=json), nullable=True), + Column("params_input", sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}), + ForeignKeyConstraint( + ["ti_id"], + ["task_instance.id"], + name="hitl_detail_ti_fkey", + ondelete="CASCADE", + onupdate="CASCADE", + ), + ) + + +def downgrade(): + """Response Human In the Loop Detail table.""" + op.drop_table("hitl_detail") diff --git a/airflow-core/src/airflow/models/__init__.py b/airflow-core/src/airflow/models/__init__.py index b28c3f2b0639a..c8640647da63f 100644 --- a/airflow-core/src/airflow/models/__init__.py +++ b/airflow-core/src/airflow/models/__init__.py @@ -60,6 +60,7 @@ def import_all_models(): import airflow.models.asset import airflow.models.backfill + import airflow.models.dag_favorite import airflow.models.dag_version import airflow.models.dagbundle import airflow.models.dagwarning @@ -90,7 +91,7 @@ def __getattr__(name): "DAG": "airflow.models.dag", "ID_LEN": "airflow.models.base", "Base": "airflow.models.base", - "BaseOperator": "airflow.models.baseoperator", + "BaseOperator": "airflow.sdk.bases.operator", "BaseOperatorLink": "airflow.sdk.bases.operatorlink", "BaseXCom": "airflow.sdk.bases.xcom", "Connection": "airflow.models.connection", @@ -102,8 +103,8 @@ def __getattr__(name): "DbCallbackRequest": "airflow.models.db_callback_request", "Deadline": "airflow.models.deadline", "Log": "airflow.models.log", + "HITLDetail": "airflow.models.hitl", "MappedOperator": "airflow.models.mappedoperator", - "Operator": "airflow.models.operator", "Param": "airflow.sdk.definitions.param", "Pool": "airflow.models.pool", "RenderedTaskInstanceFields": "airflow.models.renderedtifields", @@ -120,7 +121,6 @@ def __getattr__(name): # I was unable to get mypy to respect a airflow/models/__init__.pyi, so # having to resort back to this hacky method from airflow.models.base import ID_LEN, Base - from airflow.models.baseoperator import BaseOperator from airflow.models.connection import Connection from airflow.models.dag import DAG, DagModel, DagTag from airflow.models.dagbag import DagBag @@ -130,7 +130,6 @@ def __getattr__(name): from airflow.models.deadline import Deadline from airflow.models.log import Log from airflow.models.mappedoperator import MappedOperator - from airflow.models.operator import Operator from airflow.models.pool import Pool from airflow.models.renderedtifields import RenderedTaskInstanceFields from airflow.models.skipmixin import SkipMixin @@ -139,6 +138,7 @@ def __getattr__(name): from airflow.models.taskreschedule import TaskReschedule from airflow.models.trigger import Trigger from airflow.models.variable import Variable + from 
airflow.sdk.bases.operator import BaseOperator from airflow.sdk.bases.operatorlink import BaseOperatorLink from airflow.sdk.bases.xcom import BaseXCom from airflow.sdk.definitions.param import Param @@ -158,8 +158,16 @@ def __getattr__(name): "Param": "airflow.sdk.definitions.param.Param", "ParamsDict": "airflow.sdk.definitions.param.ParamsDict", }, + "baseoperator": { + "BaseOperator": "airflow.sdk.bases.operator.BaseOperator", + "chain": "airflow.sdk.bases.operator.chain", + }, "baseoperatorlink": { "BaseOperatorLink": "airflow.sdk.bases.operatorlink.BaseOperatorLink", }, + "operator": { + "BaseOperator": "airflow.sdk.bases.operator.BaseOperator", + "Operator": "airflow.sdk.types.Operator", + }, } add_deprecated_classes(__deprecated_classes, __name__) diff --git a/airflow-core/src/airflow/models/baseoperator.py b/airflow-core/src/airflow/models/baseoperator.py deleted file mode 100644 index 1d02cfbeff764..0000000000000 --- a/airflow-core/src/airflow/models/baseoperator.py +++ /dev/null @@ -1,522 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -""" -Base operator for all operators. 
- -:sphinx-autoapi-skip: -""" - -from __future__ import annotations - -import functools -import logging -import operator -from collections.abc import Collection, Iterable, Iterator -from datetime import datetime -from functools import singledispatchmethod -from typing import TYPE_CHECKING, Any - -from sqlalchemy import select - -# Keeping this file at all is a temp thing as we migrate the repo to the task sdk as the base, but to keep -# main working and useful for others to develop against we use the TaskSDK here but keep this file around -from airflow.models.taskinstance import TaskInstance -from airflow.sdk.bases.operator import ( - BaseOperator as TaskSDKBaseOperator, - # Re-export for compat - chain as chain, - chain_linear as chain_linear, - cross_downstream as cross_downstream, - get_merged_defaults as get_merged_defaults, -) -from airflow.sdk.definitions._internal.abstractoperator import ( - AbstractOperator as TaskSDKAbstractOperator, - NotMapped, -) -from airflow.sdk.definitions.mappedoperator import MappedOperator -from airflow.sdk.definitions.taskgroup import MappedTaskGroup, TaskGroup -from airflow.serialization.enums import DagAttributeTypes -from airflow.ti_deps.deps.mapped_task_upstream_dep import MappedTaskUpstreamDep -from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep -from airflow.ti_deps.deps.not_previously_skipped_dep import NotPreviouslySkippedDep -from airflow.ti_deps.deps.prev_dagrun_dep import PrevDagrunDep -from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep -from airflow.utils import timezone -from airflow.utils.session import NEW_SESSION, provide_session - -if TYPE_CHECKING: - from sqlalchemy.orm import Session - - from airflow.models.dag import DAG as SchedulerDAG - from airflow.models.operator import Operator - from airflow.sdk import BaseOperatorLink, Context - from airflow.sdk.definitions._internal.node import DAGNode - from airflow.ti_deps.deps.base_ti_dep import BaseTIDep - from airflow.triggers.base import StartTriggerArgs - -logger = logging.getLogger("airflow.models.baseoperator.BaseOperator") - - -class BaseOperator(TaskSDKBaseOperator): - r""" - Abstract base class for all operators. - - Since operators create objects that become nodes in the DAG, BaseOperator - contains many recursive methods for DAG crawling behavior. To derive from - this class, you are expected to override the constructor and the 'execute' - method. - - Operators derived from this class should perform or trigger certain tasks - synchronously (wait for completion). Example of operators could be an - operator that runs a Pig job (PigOperator), a sensor operator that - waits for a partition to land in Hive (HiveSensorOperator), or one that - moves data from Hive to MySQL (Hive2MySqlOperator). Instances of these - operators (tasks) target specific operations, running specific scripts, - functions or data transfers. - - This class is abstract and shouldn't be instantiated. Instantiating a - class derived from this one results in the creation of a task object, - which ultimately becomes a node in DAG objects. Task dependencies should - be set by using the set_upstream and/or set_downstream methods. - - :param task_id: a unique, meaningful id for the task - :param owner: the owner of the task. Using a meaningful description - (e.g. user/person/team/role name) to clarify ownership is recommended. - :param email: the 'to' email address(es) used in email alerts. This can be a - single email or multiple ones. 
Multiple addresses can be specified as a - comma or semicolon separated string or by passing a list of strings. - :param email_on_retry: Indicates whether email alerts should be sent when a - task is retried - :param email_on_failure: Indicates whether email alerts should be sent when - a task failed - :param retries: the number of retries that should be performed before - failing the task - :param retry_delay: delay between retries, can be set as ``timedelta`` or - ``float`` seconds, which will be converted into ``timedelta``, - the default is ``timedelta(seconds=300)``. - :param retry_exponential_backoff: allow progressively longer waits between - retries by using exponential backoff algorithm on retry delay (delay - will be converted into seconds) - :param max_retry_delay: maximum delay interval between retries, can be set as - ``timedelta`` or ``float`` seconds, which will be converted into ``timedelta``. - :param start_date: The ``start_date`` for the task, determines - the ``logical_date`` for the first task instance. The best practice - is to have the start_date rounded - to your DAG's schedule. Daily jobs have their start_date - some day at 00:00:00, hourly jobs have their start_date at 00:00 - of a specific hour. Note that Airflow simply looks at the latest - ``logical_date`` and adds the schedule to determine - the next ``logical_date``. It is also very important - to note that different tasks' dependencies - need to line up in time. If task A depends on task B and their - start_date are offset in a way that their logical_date don't line - up, A's dependencies will never be met. If you are looking to delay - a task, for example running a daily task at 2AM, look into the - ``TimeSensor`` and ``TimeDeltaSensor``. We advise against using - dynamic ``start_date`` and recommend using fixed ones. Read the - FAQ entry about start_date for more information. - :param end_date: if specified, the scheduler won't go beyond this date - :param depends_on_past: when set to true, task instances will run - sequentially and only if the previous instance has succeeded or has been skipped. - The task instance for the start_date is allowed to run. - :param wait_for_past_depends_before_skipping: when set to true, if the task instance - should be marked as skipped, and depends_on_past is true, the ti will stay on None state - waiting the task of the previous run - :param wait_for_downstream: when set to true, an instance of task - X will wait for tasks immediately downstream of the previous instance - of task X to finish successfully or be skipped before it runs. This is useful if the - different instances of a task X alter the same asset, and this asset - is used by tasks downstream of task X. Note that depends_on_past - is forced to True wherever wait_for_downstream is used. Also note that - only tasks *immediately* downstream of the previous task instance are waited - for; the statuses of any tasks further downstream are ignored. - :param dag: a reference to the dag the task is attached to (if any) - :param priority_weight: priority weight of this task against other task. - This allows the executor to trigger higher priority tasks before - others when things get backed up. Set priority_weight as a higher - number for more important tasks. - As not all database engines support 64-bit integers, values are capped with 32-bit. - Valid range is from -2,147,483,648 to 2,147,483,647. - :param weight_rule: weighting method used for the effective total - priority weight of the task. 
Options are: - ``{ downstream | upstream | absolute }`` default is ``downstream`` - When set to ``downstream`` the effective weight of the task is the - aggregate sum of all downstream descendants. As a result, upstream - tasks will have higher weight and will be scheduled more aggressively - when using positive weight values. This is useful when you have - multiple dag run instances and desire to have all upstream tasks to - complete for all runs before each dag can continue processing - downstream tasks. When set to ``upstream`` the effective weight is the - aggregate sum of all upstream ancestors. This is the opposite where - downstream tasks have higher weight and will be scheduled more - aggressively when using positive weight values. This is useful when you - have multiple dag run instances and prefer to have each dag complete - before starting upstream tasks of other dags. When set to - ``absolute``, the effective weight is the exact ``priority_weight`` - specified without additional weighting. You may want to do this when - you know exactly what priority weight each task should have. - Additionally, when set to ``absolute``, there is bonus effect of - significantly speeding up the task creation process as for very large - DAGs. Options can be set as string or using the constants defined in - the static class ``airflow.utils.WeightRule``. - Irrespective of the weight rule, resulting priority values are capped with 32-bit. - |experimental| - Since 2.9.0, Airflow allows to define custom priority weight strategy, - by creating a subclass of - ``airflow.task.priority_strategy.PriorityWeightStrategy`` and registering - in a plugin, then providing the class path or the class instance via - ``weight_rule`` parameter. The custom priority weight strategy will be - used to calculate the effective total priority weight of the task instance. - :param queue: which queue to target when running this job. Not - all executors implement queue management, the CeleryExecutor - does support targeting specific queues. - :param pool: the slot pool this task should run in, slot pools are a - way to limit concurrency for certain tasks - :param pool_slots: the number of pool slots this task should use (>= 1) - Values less than 1 are not allowed. - :param sla: DEPRECATED - The SLA feature is removed in Airflow 3.0, to be replaced with a new implementation in 3.1 - :param execution_timeout: max time allowed for the execution of - this task instance, if it goes beyond it will raise and fail. - :param on_failure_callback: a function or list of functions to be called when a task instance - of this task fails. a context dictionary is passed as a single - parameter to this function. Context contains references to related - objects to the task instance and is documented under the macros - section of the API. - :param on_execute_callback: much like the ``on_failure_callback`` except - that it is executed right before the task is executed. - :param on_retry_callback: much like the ``on_failure_callback`` except - that it is executed when retries occur. - :param on_success_callback: much like the ``on_failure_callback`` except - that it is executed when the task succeeds. - :param on_skipped_callback: much like the ``on_failure_callback`` except - that it is executed when skipped occur; this callback will be called only if AirflowSkipException get raised. 
- Explicitly it is NOT called if a task is not started to be executed because of a preceding branching - decision in the DAG or a trigger rule which causes execution to skip so that the task execution - is never scheduled. - :param pre_execute: a function to be called immediately before task - execution, receiving a context dictionary; raising an exception will - prevent the task from being executed. - - |experimental| - :param post_execute: a function to be called immediately after task - execution, receiving a context dictionary and task result; raising an - exception will prevent the task from succeeding. - - |experimental| - :param trigger_rule: defines the rule by which dependencies are applied - for the task to get triggered. Options are: - ``{ all_success | all_failed | all_done | all_skipped | one_success | one_done | - one_failed | none_failed | none_failed_min_one_success | none_skipped | always}`` - default is ``all_success``. Options can be set as string or - using the constants defined in the static class - ``airflow.utils.TriggerRule`` - :param resources: A map of resource parameter names (the argument names of the - Resources constructor) to their values. - :param run_as_user: unix username to impersonate while running the task - :param max_active_tis_per_dag: When set, a task will be able to limit the concurrent - runs across logical_dates. - :param max_active_tis_per_dagrun: When set, a task will be able to limit the concurrent - task instances per DAG run. - :param executor: Which executor to target when running this task. NOT YET SUPPORTED - :param executor_config: Additional task-level configuration parameters that are - interpreted by a specific executor. Parameters are namespaced by the name of - executor. - - **Example**: to run this task in a specific docker container through - the KubernetesExecutor :: - - MyOperator(..., executor_config={"KubernetesExecutor": {"image": "myCustomDockerImage"}}) - - :param do_xcom_push: if True, an XCom is pushed containing the Operator's - result - :param multiple_outputs: if True and do_xcom_push is True, pushes multiple XComs, one for each - key in the returned dictionary result. If False and do_xcom_push is True, pushes a single XCom. - :param task_group: The TaskGroup to which the task should belong. This is typically provided when not - using a TaskGroup as a context manager. - :param doc: Add documentation or notes to your Task objects that is visible in - Task Instance details View in the Webserver - :param doc_md: Add documentation (in Markdown format) or notes to your Task objects - that is visible in Task Instance details View in the Webserver - :param doc_rst: Add documentation (in RST format) or notes to your Task objects - that is visible in Task Instance details View in the Webserver - :param doc_json: Add documentation (in JSON format) or notes to your Task objects - that is visible in Task Instance details View in the Webserver - :param doc_yaml: Add documentation (in YAML format) or notes to your Task objects - that is visible in Task Instance details View in the Webserver - :param task_display_name: The display name of the task which appears on the UI. - :param logger_name: Name of the logger used by the Operator to emit logs. - If set to `None` (default), the logger name will fall back to - `airflow.task.operators.{class.__module__}.{class.__name__}` (e.g. SimpleHttpOperator will have - *airflow.task.operators.airflow.providers.http.operators.http.SimpleHttpOperator* as logger). 
- :param allow_nested_operators: if True, when an operator is executed within another one a warning message - will be logged. If False, then an exception will be raised if the operator is badly used (e.g. nested - within another one). In future releases of Airflow this parameter will be removed and an exception - will always be thrown when operators are nested within each other (default is True). - - **Example**: example of a bad operator mixin usage:: - - @task(provide_context=True) - def say_hello_world(**context): - hello_world_task = BashOperator( - task_id="hello_world_task", - bash_command="python -c \"print('Hello, world!')\"", - dag=dag, - ) - hello_world_task.execute(context) - """ - - def __init__(self, **kwargs): - if start_date := kwargs.get("start_date", None): - kwargs["start_date"] = timezone.convert_to_utc(start_date) - if end_date := kwargs.get("end_date", None): - kwargs["end_date"] = timezone.convert_to_utc(end_date) - super().__init__(**kwargs) - - # Defines the operator level extra links - operator_extra_links: Collection[BaseOperatorLink] = () - - if TYPE_CHECKING: - - @property # type: ignore[override] - def dag(self) -> SchedulerDAG: # type: ignore[override] - return super().dag # type: ignore[return-value] - - @dag.setter - def dag(self, val: SchedulerDAG): - # For type checking only - ... - - def get_inlet_defs(self): - """ - Get inlet definitions on this task. - - :meta private: - """ - return self.inlets - - def get_outlet_defs(self): - """ - Get outlet definitions on this task. - - :meta private: - """ - return self.outlets - - deps: frozenset[BaseTIDep] = frozenset( - { - NotInRetryPeriodDep(), - PrevDagrunDep(), - TriggerRuleDep(), - NotPreviouslySkippedDep(), - MappedTaskUpstreamDep(), - } - ) - """ - Returns the set of dependencies for the operator. These differ from execution - context dependencies in that they are specific to tasks and can be - extended/overridden by subclasses. - """ - - def execute(self, context: Context) -> Any: - """ - Derive when creating an operator. - - Context is the same dictionary used as when rendering jinja templates. - - Refer to get_template_context for more context. - """ - raise NotImplementedError() - - @provide_session - def get_task_instances( - self, - start_date: datetime | None = None, - end_date: datetime | None = None, - session: Session = NEW_SESSION, - ) -> list[TaskInstance]: - """Get task instances related to this task for a specific date range.""" - from airflow.models import DagRun - - query = ( - select(TaskInstance) - .join(TaskInstance.dag_run) - .where(TaskInstance.dag_id == self.dag_id) - .where(TaskInstance.task_id == self.task_id) - ) - if start_date: - query = query.where(DagRun.logical_date >= start_date) - if end_date: - query = query.where(DagRun.logical_date <= end_date) - return session.scalars(query.order_by(DagRun.logical_date)).all() - - def dry_run(self) -> None: - """Perform dry run for the operator - just render template fields.""" - self.log.info("Dry run") - for field in self.template_fields: - try: - content = getattr(self, field) - except AttributeError: - raise AttributeError( - f"{field!r} is configured as a template field " - f"but {self.task_type} does not have this attribute." 
- ) - - if content and isinstance(content, str): - self.log.info("Rendering template for %s", field) - self.log.info(content) - - def get_direct_relatives(self, upstream: bool = False) -> Iterable[Operator]: - """Get list of the direct relatives to the current task, upstream or downstream.""" - if upstream: - return self.upstream_list - return self.downstream_list - - def serialize_for_task_group(self) -> tuple[DagAttributeTypes, Any]: - """Serialize; required by DAGNode.""" - return DagAttributeTypes.OP, self.task_id - - def unmap(self, resolve: None | dict[str, Any] | tuple[Context, Session]) -> BaseOperator: - """ - Get the "normal" operator from the current operator. - - Since a BaseOperator is not mapped to begin with, this simply returns - the original operator. - - :meta private: - """ - return self - - def expand_start_from_trigger(self, *, context: Context, session: Session) -> bool: - """ - Get the start_from_trigger value of the current abstract operator. - - Since a BaseOperator is not mapped to begin with, this simply returns - the original value of start_from_trigger. - - :meta private: - """ - return self.start_from_trigger - - def expand_start_trigger_args(self, *, context: Context, session: Session) -> StartTriggerArgs | None: - """ - Get the start_trigger_args value of the current abstract operator. - - Since a BaseOperator is not mapped to begin with, this simply returns - the original value of start_trigger_args. - - :meta private: - """ - return self.start_trigger_args - - if TYPE_CHECKING: - - @classmethod - def get_mapped_ti_count( - cls, node: DAGNode | MappedTaskGroup, run_id: str, *, session: Session - ) -> int: - """ - Return the number of mapped TaskInstances that can be created at run time. - - This considers both literal and non-literal mapped arguments, and the - result is therefore available when all depended tasks have finished. The - return value should be identical to ``parse_time_mapped_ti_count`` if - all mapped arguments are literal. - - :raise NotFullyPopulated: If upstream tasks are not all complete yet. - :raise NotMapped: If the operator is neither mapped, nor has any parent - mapped task groups. - :return: Total number of mapped TIs this task should have. - """ - else: - - @singledispatchmethod - @classmethod - def get_mapped_ti_count(cls, task: DAGNode, run_id: str, *, session: Session) -> int: - raise NotImplementedError(f"Not implemented for {type(task)}") - - # https://github.com/python/cpython/issues/86153 - # While we support Python 3.9 we can't rely on the type hint, we need to pass the type explicitly to - # register. - @get_mapped_ti_count.register(TaskSDKAbstractOperator) - @classmethod - def _(cls, task: TaskSDKAbstractOperator, run_id: str, *, session: Session) -> int: - group = task.get_closest_mapped_task_group() - if group is None: - raise NotMapped() - return cls.get_mapped_ti_count(group, run_id, session=session) - - @get_mapped_ti_count.register(MappedOperator) - @classmethod - def _(cls, task: MappedOperator, run_id: str, *, session: Session) -> int: - from airflow.serialization.serialized_objects import BaseSerialization, _ExpandInputRef - - exp_input = task._get_specified_expand_input() - if isinstance(exp_input, _ExpandInputRef): - exp_input = exp_input.deref(task.dag) - # TODO: TaskSDK This is only needed to support `dag.test()` etc until we port it over to use the - # task sdk runner. 
- if not hasattr(exp_input, "get_total_map_length"): - exp_input = _ExpandInputRef( - type(exp_input).EXPAND_INPUT_TYPE, - BaseSerialization.deserialize(BaseSerialization.serialize(exp_input.value)), - ) - exp_input = exp_input.deref(task.dag) - - current_count = exp_input.get_total_map_length(run_id, session=session) - - group = task.get_closest_mapped_task_group() - if group is None: - return current_count - parent_count = cls.get_mapped_ti_count(group, run_id, session=session) - return parent_count * current_count - - @get_mapped_ti_count.register(TaskGroup) - @classmethod - def _(cls, group: TaskGroup, run_id: str, *, session: Session) -> int: - """ - Return the number of instances a task in this group should be mapped to at run time. - - This considers both literal and non-literal mapped arguments, and the - result is therefore available when all depended tasks have finished. The - return value should be identical to ``parse_time_mapped_ti_count`` if - all mapped arguments are literal. - - If this group is inside mapped task groups, all the nested counts are - multiplied and accounted. - - :raise NotFullyPopulated: If upstream tasks are not all complete yet. - :return: Total number of mapped TIs this task should have. - """ - from airflow.serialization.serialized_objects import BaseSerialization, _ExpandInputRef - - def iter_mapped_task_group_lengths(group) -> Iterator[int]: - while group is not None: - if isinstance(group, MappedTaskGroup): - exp_input = group._expand_input - # TODO: TaskSDK This is only needed to support `dag.test()` etc until we port it over to use the - # task sdk runner. - if not hasattr(exp_input, "get_total_map_length"): - exp_input = _ExpandInputRef( - type(exp_input).EXPAND_INPUT_TYPE, - BaseSerialization.deserialize(BaseSerialization.serialize(exp_input.value)), - ) - exp_input = exp_input.deref(group.dag) - yield exp_input.get_total_map_length(run_id, session=session) - group = group.parent_group - - return functools.reduce(operator.mul, iter_mapped_task_group_lengths(group)) diff --git a/airflow-core/src/airflow/models/connection.py b/airflow-core/src/airflow/models/connection.py index a3ff23bbdea07..8628c592584be 100644 --- a/airflow-core/src/airflow/models/connection.py +++ b/airflow-core/src/airflow/models/connection.py @@ -487,7 +487,7 @@ def get_connection_from_secrets(cls, conn_id: str) -> Connection: return conn except AirflowRuntimeError as e: if e.error.error == ErrorType.CONNECTION_NOT_FOUND: - raise AirflowNotFoundException(f"The conn_id `{conn_id}` isn't defined") + raise AirflowNotFoundException(f"The conn_id `{conn_id}` isn't defined") from None raise # check cache first diff --git a/airflow-core/src/airflow/models/dag.py b/airflow-core/src/airflow/models/dag.py index 0332a2e825e24..b50957e667cdb 100644 --- a/airflow-core/src/airflow/models/dag.py +++ b/airflow-core/src/airflow/models/dag.py @@ -69,12 +69,12 @@ UnknownExecutorException, ) from airflow.executors.executor_loader import ExecutorLoader +from airflow.models import Deadline from airflow.models.asset import ( AssetDagRunQueue, AssetModel, ) from airflow.models.base import Base, StringID -from airflow.models.baseoperator import BaseOperator from airflow.models.dag_version import DagVersion from airflow.models.dagrun import RUN_ID_REGEX, DagRun from airflow.models.taskinstance import ( @@ -86,7 +86,7 @@ from airflow.sdk import TaskGroup from airflow.sdk.definitions.asset import Asset, AssetAlias, AssetUniqueKey, BaseAsset from airflow.sdk.definitions.dag import DAG as TaskSDKDag, dag 
as task_sdk_dag_decorator -from airflow.sdk.definitions.deadline import DeadlineAlert +from airflow.sdk.definitions.deadline import DeadlineAlert, DeadlineReference from airflow.settings import json from airflow.timetables.base import DagRunInfo, DataInterval, TimeRestriction, Timetable from airflow.timetables.interval import CronDataIntervalTimetable, DeltaDataIntervalTimetable @@ -105,14 +105,15 @@ from airflow.utils.types import DagRunTriggeredByType, DagRunType if TYPE_CHECKING: + from typing import Literal + from pydantic import NonNegativeInt from sqlalchemy.orm.query import Query from sqlalchemy.orm.session import Session from airflow.models.dagbag import DagBag - from airflow.models.operator import Operator + from airflow.sdk.types import Operator from airflow.serialization.serialized_objects import MaybeSerializedDAG - from airflow.typing_compat import Literal log = logging.getLogger(__name__) @@ -245,6 +246,9 @@ def _create_orm_dagrun( select(DagModel.bundle_version).where(DagModel.dag_id == dag.dag_id), ) dag_version = DagVersion.get_latest_version(dag.dag_id, session=session) + if not dag_version: + raise AirflowException(f"Cannot create DagRun for DAG {dag.dag_id} because the dag is not serialized") + run = DagRun( dag_id=dag.dag_id, run_id=run_id, @@ -270,7 +274,7 @@ def _create_orm_dagrun( run.dag = dag # create the associated task instances # state is None at the moment of creation - run.verify_integrity(session=session, dag_version_id=dag_version.id if dag_version else None) + run.verify_integrity(session=session, dag_version_id=dag_version.id) return run @@ -1219,9 +1223,10 @@ def set_task_group_state( :param session: new session """ from airflow.api.common.mark_tasks import set_state + from airflow.serialization.serialized_objects import SerializedBaseOperator as BaseOperator - tasks_to_set_state: list[BaseOperator | tuple[BaseOperator, int]] = [] - task_ids: list[str] = [] + tasks_to_set_state: list + task_ids: list[str] task_group_dict = self.task_group.get_task_group_dict() task_group = task_group_dict.get(group_id) @@ -1581,7 +1586,7 @@ def create_dagrun( if conf: copied_params.update(conf) copied_params.validate() - return _create_orm_dagrun( + orm_dagrun = _create_orm_dagrun( dag=self, run_id=run_id, logical_date=logical_date, @@ -1598,6 +1603,24 @@ def create_dagrun( session=session, ) + if self.deadline and isinstance(self.deadline.reference, DeadlineReference.TYPES.DAGRUN): + session.add( + Deadline( + deadline_time=self.deadline.reference.evaluate_with( + session=session, + interval=self.deadline.interval, + dag_id=self.dag_id, + run_id=run_id, + ), + callback=self.deadline.callback, + callback_kwargs=self.deadline.callback_kwargs or {}, + dag_id=self.dag_id, + dagrun_id=orm_dagrun.id, + ) + ) + + return orm_dagrun + @classmethod @provide_session def bulk_write_to_db( @@ -2009,7 +2032,7 @@ def deadline(self): @deadline.setter def deadline(self, value): """Set and serialize the deadline alert.""" - self._deadline = None if value is None else value.serialize_deadline_alert() + self._deadline = value if isinstance(value, dict) else value.serialize_deadline_alert() @property def timezone(self): diff --git a/airflow-core/src/airflow/api_fastapi/gunicorn_config.py b/airflow-core/src/airflow/models/dag_favorite.py similarity index 69% rename from airflow-core/src/airflow/api_fastapi/gunicorn_config.py rename to airflow-core/src/airflow/models/dag_favorite.py index f2c17eef76d4f..5dfb742fdaf80 100644 --- a/airflow-core/src/airflow/api_fastapi/gunicorn_config.py +++ 
b/airflow-core/src/airflow/models/dag_favorite.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -18,16 +17,15 @@ # under the License. from __future__ import annotations -import setproctitle +from sqlalchemy import Column, ForeignKey -from airflow import settings +from airflow.models.base import Base, StringID -def post_worker_init(_): - """ - Set process title. +class DagFavorite(Base): + """Association table model linking users to their favorite DAGs.""" - This is used by airflow.cli.commands.api_server_command to track the status of the worker. - """ - old_title = setproctitle.getproctitle() - setproctitle.setproctitle(settings.GUNICORN_WORKER_READY_PREFIX + old_title) + __tablename__ = "dag_favorite" + + user_id = Column(StringID(), primary_key=True) + dag_id = Column(StringID(), ForeignKey("dag.dag_id", ondelete="CASCADE"), primary_key=True) diff --git a/airflow-core/src/airflow/models/dagrun.py b/airflow-core/src/airflow/models/dagrun.py index 9415a3b0f7a34..a9f174875e623 100644 --- a/airflow-core/src/airflow/models/dagrun.py +++ b/airflow-core/src/airflow/models/dagrun.py @@ -91,22 +91,27 @@ if TYPE_CHECKING: from datetime import datetime + from typing import Literal from opentelemetry.sdk.trace import Span from pydantic import NonNegativeInt from sqlalchemy.orm import Query, Session from sqlalchemy.sql.elements import Case - from airflow.models.baseoperator import BaseOperator from airflow.models.dag import DAG from airflow.models.dag_version import DagVersion - from airflow.models.operator import Operator + from airflow.models.taskinstancekey import TaskInstanceKey from airflow.sdk import DAG as SDKDAG, Context - from airflow.typing_compat import Literal + from airflow.sdk.types import Operator + from airflow.serialization.serialized_objects import SerializedBaseOperator as BaseOperator from airflow.utils.types import ArgNotSet CreatedTasks = TypeVar("CreatedTasks", Iterator["dict[str, Any]"], Iterator[TI]) + AttributeValueType = ( + str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float] + ) + RUN_ID_REGEX = r"^(?:manual|scheduled|asset_triggered)__(?:\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\+00:00)$" @@ -980,12 +985,8 @@ def set_dagrun_span_attrs(self, span: Span | EmptySpan): if self._state == DagRunState.FAILED: span.set_attribute("airflow.dag_run.error", True) - attribute_value_type = ( - str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float] - ) - # Explicitly set the value type to Union[...] to avoid a mypy error. - attributes: dict[str, attribute_value_type] = { + attributes: dict[str, AttributeValueType] = { "airflow.category": "DAG runs", "airflow.dag_run.dag_id": str(self.dag_id), "airflow.dag_run.logical_date": str(self.logical_date), @@ -1376,7 +1377,7 @@ def _get_ready_tis( finished_tis: list[TI], session: Session, ) -> tuple[list[TI], bool, bool]: - old_states = {} + old_states: dict[TaskInstanceKey, Any] = {} ready_tis: list[TI] = [] changed_tis = False @@ -1428,7 +1429,7 @@ def _expand_mapped_task_if_needed(ti: TI) -> Iterable[TI] | None: # Check dependencies. 
expansion_happened = False # Set of task ids for which was already done _revise_map_indexes_if_mapped - revised_map_index_task_ids = set() + revised_map_index_task_ids: set[str] = set() for schedulable in itertools.chain(schedulable_tis, additional_tis): if TYPE_CHECKING: assert isinstance(schedulable.task, BaseOperator) @@ -1452,7 +1453,11 @@ def _expand_mapped_task_if_needed(ti: TI) -> Iterable[TI] | None: # It's enough to revise map index once per task id, # checking the map index for each mapped task significantly slows down scheduling if schedulable.task.task_id not in revised_map_index_task_ids: - ready_tis.extend(self._revise_map_indexes_if_mapped(schedulable.task, session=session)) + ready_tis.extend( + self._revise_map_indexes_if_mapped( + schedulable.task, dag_version_id=schedulable.dag_version_id, session=session + ) + ) revised_map_index_task_ids.add(schedulable.task.task_id) ready_tis.append(schedulable) @@ -1555,9 +1560,7 @@ def _emit_duration_stats_for_finished_state(self): Stats.timing(f"dagrun.duration.{self.state}", **timer_params) @provide_session - def verify_integrity( - self, *, session: Session = NEW_SESSION, dag_version_id: UUIDType | None = None - ) -> None: + def verify_integrity(self, *, session: Session = NEW_SESSION, dag_version_id: UUIDType) -> None: """ Verify the DagRun by checking for removed tasks or tasks that are not in the database yet. @@ -1611,8 +1614,8 @@ def _check_for_removed_or_restored_tasks( :return: Task IDs in the DAG run """ - from airflow.models.baseoperator import BaseOperator from airflow.models.expandinput import NotFullyPopulated + from airflow.models.mappedoperator import get_mapped_ti_count tis = self.get_task_instances(session=session) @@ -1649,7 +1652,7 @@ def _check_for_removed_or_restored_tasks( except NotFullyPopulated: # What if it is _now_ dynamically mapped, but wasn't before? try: - total_length = BaseOperator.get_mapped_ti_count(task, self.run_id, session=session) + total_length = get_mapped_ti_count(task, self.run_id, session=session) except NotFullyPopulated: # Not all upstreams finished, so we can't tell what should be here. Remove everything. if ti.map_index >= 0: @@ -1687,7 +1690,7 @@ def _get_task_creator( created_counts: dict[str, int], ti_mutation_hook: Callable, hook_is_noop: Literal[True], - dag_version_id: UUIDType | None, + dag_version_id: UUIDType, ) -> Callable[[Operator, Iterable[int]], Iterator[dict[str, Any]]]: ... @overload @@ -1696,7 +1699,7 @@ def _get_task_creator( created_counts: dict[str, int], ti_mutation_hook: Callable, hook_is_noop: Literal[False], - dag_version_id: UUIDType | None, + dag_version_id: UUIDType, ) -> Callable[[Operator, Iterable[int]], Iterator[TI]]: ... def _get_task_creator( @@ -1704,7 +1707,7 @@ def _get_task_creator( created_counts: dict[str, int], ti_mutation_hook: Callable, hook_is_noop: Literal[True, False], - dag_version_id: UUIDType | None, + dag_version_id: UUIDType, ) -> Callable[[Operator, Iterable[int]], Iterator[dict[str, Any]] | Iterator[TI]]: """ Get the task creator function. 
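The `_get_task_creator` overloads above keep the `Literal[True]` / `Literal[False]` trick while making `dag_version_id` mandatory: the compile-time value of `hook_is_noop` decides whether the creator yields plain dicts (for a bulk insert) or full task-instance objects. A minimal, self-contained sketch of that typing pattern, with toy names standing in for Airflow's:

```python
from __future__ import annotations

from collections.abc import Iterator
from typing import Any, Literal, overload


@overload
def make_creator(hook_is_noop: Literal[True]) -> Iterator[dict[str, Any]]: ...
@overload
def make_creator(hook_is_noop: Literal[False]) -> Iterator[tuple[int, int]]: ...
def make_creator(
    hook_is_noop: Literal[True, False],
) -> Iterator[dict[str, Any]] | Iterator[tuple[int, int]]:
    # The no-op-hook path yields plain dicts suitable for a bulk INSERT; the
    # other path yields richer objects (tuples stand in for TaskInstance here).
    if hook_is_noop:
        return iter({"map_index": i} for i in range(3))
    return iter((i, i * i) for i in range(3))


rows = make_creator(True)   # type checkers infer Iterator[dict[str, Any]]
objs = make_creator(False)  # ...and Iterator[tuple[int, int]] here
```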
@@ -1752,13 +1755,13 @@ def _create_tasks( :param tasks: Tasks to create jobs for in the DAG run :param task_creator: Function to create task instances """ - from airflow.models.baseoperator import BaseOperator from airflow.models.expandinput import NotFullyPopulated + from airflow.models.mappedoperator import get_mapped_ti_count map_indexes: Iterable[int] for task in tasks: try: - count = BaseOperator.get_mapped_ti_count(task, self.run_id, session=session) + count = get_mapped_ti_count(task, self.run_id, session=session) except (NotMapped, NotFullyPopulated): map_indexes = (-1,) else: @@ -1815,7 +1818,9 @@ def _create_task_instances( # TODO[HA]: We probably need to savepoint this so we can keep the transaction alive. session.rollback() - def _revise_map_indexes_if_mapped(self, task: Operator, *, session: Session) -> Iterator[TI]: + def _revise_map_indexes_if_mapped( + self, task: Operator | BaseOperator, *, dag_version_id: UUIDType, session: Session + ) -> Iterator[TI]: """ Check if task increased or reduced in length and handle appropriately. @@ -1824,12 +1829,12 @@ def _revise_map_indexes_if_mapped(self, task: Operator, *, session: Session) -> we delay expansion to the "last resort". See comments at the call site for more details. """ - from airflow.models.baseoperator import BaseOperator from airflow.models.expandinput import NotFullyPopulated + from airflow.models.mappedoperator import get_mapped_ti_count from airflow.settings import task_instance_mutation_hook try: - total_length = BaseOperator.get_mapped_ti_count(task, self.run_id, session=session) + total_length = get_mapped_ti_count(task, self.run_id, session=session) except NotMapped: return # Not a mapped task, don't need to do anything. except NotFullyPopulated: @@ -1861,7 +1866,7 @@ def _revise_map_indexes_if_mapped(self, task: Operator, *, session: Session) -> for index in range(total_length): if index in existing_indexes: continue - ti = TI(task, run_id=self.run_id, map_index=index, state=None) + ti = TI(task, run_id=self.run_id, map_index=index, state=None, dag_version_id=dag_version_id) self.log.debug("Expanding TIs upserted %s", ti) task_instance_mutation_hook(ti) ti = session.merge(ti) @@ -1904,8 +1909,8 @@ def schedule_tis( """ # Get list of TI IDs that do not need to executed, these are # tasks using EmptyOperator and without on_execute_callback / on_success_callback - empty_ti_ids = [] - schedulable_ti_ids = [] + empty_ti_ids: list[str] = [] + schedulable_ti_ids: list[str] = [] for ti in schedulable_tis: if TYPE_CHECKING: assert isinstance(ti.task, BaseOperator) diff --git a/airflow-core/src/airflow/models/deadline.py b/airflow-core/src/airflow/models/deadline.py index 9891d3947ee69..a20f79a4cde8d 100644 --- a/airflow-core/src/airflow/models/deadline.py +++ b/airflow-core/src/airflow/models/deadline.py @@ -19,7 +19,7 @@ import logging from abc import ABC, abstractmethod from dataclasses import dataclass -from datetime import datetime +from datetime import datetime, timedelta from typing import TYPE_CHECKING, Any import sqlalchemy_jsonfield @@ -34,7 +34,7 @@ from airflow.utils import timezone from airflow.utils.decorators import classproperty from airflow.utils.log.logging_mixin import LoggingMixin -from airflow.utils.session import NEW_SESSION, provide_session +from airflow.utils.session import provide_session from airflow.utils.sqlalchemy import UtcDateTime if TYPE_CHECKING: @@ -98,12 +98,6 @@ def _determine_resource() -> tuple[str, str]: f"{self.deadline_time} or run: {self.callback}({callback_kwargs})" ) - 
@classmethod - @provide_session - def add_deadline(cls, deadline: Deadline, session: Session = NEW_SESSION): - """Add the provided deadline to the table.""" - session.add(deadline) - class ReferenceModels: """ @@ -143,7 +137,7 @@ class BaseDeadlineReference(LoggingMixin, ABC): def reference_name(cls: Any) -> str: return cls.__name__ - def evaluate_with(self, **kwargs: Any) -> datetime: + def evaluate_with(self, *, session: Session, interval: timedelta, **kwargs: Any) -> datetime: """Validate the provided kwargs and evaluate this deadline with the given conditions.""" filtered_kwargs = {k: v for k, v in kwargs.items() if k in self.required_kwargs} @@ -155,10 +149,10 @@ def evaluate_with(self, **kwargs: Any) -> datetime: if extra_kwargs := kwargs.keys() - filtered_kwargs.keys(): self.log.debug("Ignoring unexpected parameters: %s", ", ".join(extra_kwargs)) - return self._evaluate_with(**filtered_kwargs) + return self._evaluate_with(session=session, **filtered_kwargs) + interval @abstractmethod - def _evaluate_with(self, **kwargs: Any) -> datetime: + def _evaluate_with(self, *, session: Session, **kwargs: Any) -> datetime: """Must be implemented by subclasses to perform the actual evaluation.""" raise NotImplementedError @@ -192,7 +186,7 @@ class FixedDatetimeDeadline(BaseDeadlineReference): _datetime: datetime - def _evaluate_with(self, **kwargs: Any) -> datetime: + def _evaluate_with(self, *, session: Session, **kwargs: Any) -> datetime: return self._datetime def serialize_reference(self) -> dict: @@ -208,22 +202,23 @@ def deserialize_reference(cls, reference_data: dict): class DagRunLogicalDateDeadline(BaseDeadlineReference): """A deadline that returns a DagRun's logical date.""" - required_kwargs = {"dag_id"} + required_kwargs = {"dag_id", "run_id"} - def _evaluate_with(self, **kwargs: Any) -> datetime: + def _evaluate_with(self, *, session: Session, **kwargs: Any) -> datetime: from airflow.models import DagRun - return _fetch_from_db(DagRun.logical_date, **kwargs) + return _fetch_from_db(DagRun.logical_date, session=session, **kwargs) class DagRunQueuedAtDeadline(BaseDeadlineReference): """A deadline that returns when a DagRun was queued.""" - required_kwargs = {"dag_id"} + required_kwargs = {"dag_id", "run_id"} - def _evaluate_with(self, **kwargs: Any) -> datetime: + @provide_session + def _evaluate_with(self, *, session: Session, **kwargs: Any) -> datetime: from airflow.models import DagRun - return _fetch_from_db(DagRun.queued_at, **kwargs) + return _fetch_from_db(DagRun.queued_at, session=session, **kwargs) DeadlineReferenceType = ReferenceModels.BaseDeadlineReference diff --git a/airflow-core/src/airflow/models/expandinput.py b/airflow-core/src/airflow/models/expandinput.py index 803a8fe294c6d..6aa44316f6e77 100644 --- a/airflow-core/src/airflow/models/expandinput.py +++ b/airflow-core/src/airflow/models/expandinput.py @@ -25,10 +25,11 @@ import attrs if TYPE_CHECKING: + from typing import TypeGuard + from sqlalchemy.orm import Session from airflow.models.xcom_arg import SchedulerXComArg - from airflow.typing_compat import TypeGuard from airflow.sdk.definitions._internal.expandinput import ( DictOfListsExpandInput, diff --git a/airflow-core/src/airflow/models/hitl.py b/airflow-core/src/airflow/models/hitl.py new file mode 100644 index 0000000000000..9d060ba1c19d7 --- /dev/null +++ b/airflow-core/src/airflow/models/hitl.py @@ -0,0 +1,69 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import sqlalchemy_jsonfield +from sqlalchemy import Boolean, Column, ForeignKeyConstraint, String, Text +from sqlalchemy.dialects import postgresql +from sqlalchemy.ext.hybrid import hybrid_property + +from airflow.models.base import Base +from airflow.settings import json +from airflow.utils.sqlalchemy import UtcDateTime + + +class HITLDetail(Base): + """Human-in-the-loop request and corresponding response.""" + + __tablename__ = "hitl_detail" + ti_id = Column( + String(36).with_variant(postgresql.UUID(as_uuid=False), "postgresql"), + primary_key=True, + nullable=False, + ) + + # User Request Detail + options = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False) + subject = Column(Text, nullable=False) + body = Column(Text, nullable=True) + defaults = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) + multiple = Column(Boolean, unique=False, default=False) + params = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}) + + # Response Content Detail + response_at = Column(UtcDateTime, nullable=True) + user_id = Column(String(128), nullable=True) + chosen_options = Column( + sqlalchemy_jsonfield.JSONField(json=json), + nullable=True, + default=None, + ) + params_input = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}) + + __table_args__ = ( + ForeignKeyConstraint( + (ti_id,), + ["task_instance.id"], + name="hitl_detail_ti_fkey", + ondelete="CASCADE", + onupdate="CASCADE", + ), + ) + + @hybrid_property + def response_received(self) -> bool: + return self.response_at is not None diff --git a/airflow-core/src/airflow/models/mappedoperator.py b/airflow-core/src/airflow/models/mappedoperator.py index 205e8da16dd8d..8612064264cc1 100644 --- a/airflow-core/src/airflow/models/mappedoperator.py +++ b/airflow-core/src/airflow/models/mappedoperator.py @@ -17,23 +17,31 @@ # under the License. 
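The new `HITLDetail` model stores a human-in-the-loop request and its response in one row. Note that `response_received` is a `hybrid_property` with no separate class-level SQL expression, so `self.response_at is not None` is only meaningful on loaded instances; query-side filtering should compare the column itself. A short usage sketch under those assumptions (the UUID is a hypothetical placeholder):

```python
from sqlalchemy import select

from airflow.models.hitl import HITLDetail

# Instance level: Python evaluates `response_at is not None` directly.
detail = HITLDetail(
    ti_id="01960000-0000-0000-0000-000000000000",  # hypothetical TI UUID
    subject="Approve deployment?",
    options=["approve", "reject"],
)
assert detail.response_received is False  # no response recorded yet

# Query level: filter on the underlying column, since the hybrid property
# defines no class-level SQL expression.
pending = select(HITLDetail).where(HITLDetail.response_at.is_(None))
```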
from __future__ import annotations -from functools import cached_property +import functools +import operator from typing import TYPE_CHECKING, Any import attrs import structlog +from sqlalchemy.orm import Session from airflow.exceptions import AirflowException +from airflow.sdk.bases.operator import BaseOperator as TaskSDKBaseOperator +from airflow.sdk.definitions._internal.abstractoperator import NotMapped from airflow.sdk.definitions.mappedoperator import MappedOperator as TaskSDKMappedOperator -from airflow.triggers.base import StartTriggerArgs +from airflow.sdk.definitions.taskgroup import MappedTaskGroup, TaskGroup +from airflow.serialization.serialized_objects import DEFAULT_OPERATOR_DEPS, SerializedBaseOperator from airflow.utils.helpers import prevent_duplicates if TYPE_CHECKING: - from sqlalchemy.orm.session import Session + from collections.abc import Iterator from airflow.models import TaskInstance + from airflow.models.dag import DAG as SchedulerDAG from airflow.sdk import BaseOperatorLink + from airflow.sdk.definitions._internal.node import DAGNode from airflow.sdk.definitions.context import Context + from airflow.ti_deps.deps.base_ti_dep import BaseTIDep log = structlog.get_logger(__name__) @@ -53,6 +61,8 @@ class MappedOperator(TaskSDKMappedOperator): # type: ignore[misc] # It complains about weight_rule being different """Object representing a mapped operator in a DAG.""" + deps: frozenset[BaseTIDep] = attrs.field(init=False, default=DEFAULT_OPERATOR_DEPS) + def expand_start_from_trigger(self, *, context: Context, session: Session) -> bool: """ Get the start_from_trigger value of the current abstract operator. @@ -86,44 +96,7 @@ def expand_start_from_trigger(self, *, context: Context, session: Session) -> bo "start_from_trigger", self.partial_kwargs.get("start_from_trigger", self.start_from_trigger) ) - def expand_start_trigger_args(self, *, context: Context, session: Session) -> StartTriggerArgs | None: - """ - Get the kwargs to create the unmapped start_trigger_args. - - This method is for allowing mapped operator to start execution from triggerer. - """ - if not self.start_trigger_args: - return None - - mapped_kwargs, _ = self._expand_mapped_kwargs(context) - if self._disallow_kwargs_override: - prevent_duplicates( - self.partial_kwargs, - mapped_kwargs, - fail_reason="unmappable or already specified", - ) - - # Ordering is significant; mapped kwargs should override partial ones. 
- trigger_kwargs = mapped_kwargs.get( - "trigger_kwargs", - self.partial_kwargs.get("trigger_kwargs", self.start_trigger_args.trigger_kwargs), - ) - next_kwargs = mapped_kwargs.get( - "next_kwargs", - self.partial_kwargs.get("next_kwargs", self.start_trigger_args.next_kwargs), - ) - timeout = mapped_kwargs.get( - "trigger_timeout", self.partial_kwargs.get("trigger_timeout", self.start_trigger_args.timeout) - ) - return StartTriggerArgs( - trigger_cls=self.start_trigger_args.trigger_cls, - trigger_kwargs=trigger_kwargs, - next_method=self.start_trigger_args.next_method, - next_kwargs=next_kwargs, - timeout=timeout, - ) - - @cached_property + @functools.cached_property def operator_extra_link_dict(self) -> dict[str, BaseOperatorLink]: """Returns dictionary of all extra links for the operator.""" op_extra_links_from_plugin: dict[str, Any] = {} @@ -143,7 +116,7 @@ def operator_extra_link_dict(self) -> dict[str, BaseOperatorLink]: return operator_extra_links_all - @cached_property + @functools.cached_property def global_operator_extra_link_dict(self) -> dict[str, Any]: """Returns dictionary of all global extra links.""" from airflow import plugins_manager @@ -153,7 +126,7 @@ def global_operator_extra_link_dict(self) -> dict[str, Any]: raise AirflowException("Can't load operators") return {link.name: link for link in plugins_manager.global_operator_extra_links} - @cached_property + @functools.cached_property def extra_links(self) -> list[str]: return sorted(set(self.operator_extra_link_dict).union(self.global_operator_extra_link_dict)) @@ -173,3 +146,87 @@ def get_extra_links(self, ti: TaskInstance, name: str) -> str | None: if not link: return None return link.get_link(self, ti_key=ti.key) # type: ignore[arg-type] + + +@functools.singledispatch +def get_mapped_ti_count(task: DAGNode, run_id: str, *, session: Session) -> int: + raise NotImplementedError(f"Not implemented for {type(task)}") + + +# Still accept TaskSDKBaseOperator because some tests don't go through serialization. +# TODO (GH-52141): Rewrite tests so we can drop SDK references at some point. +@get_mapped_ti_count.register(SerializedBaseOperator) +@get_mapped_ti_count.register(TaskSDKBaseOperator) +def _(task: SerializedBaseOperator | TaskSDKBaseOperator, run_id: str, *, session: Session) -> int: + group = task.get_closest_mapped_task_group() + if group is None: + raise NotMapped() + return get_mapped_ti_count(group, run_id, session=session) + + +# Still accept TaskSDKMappedOperator because some tests don't go through serialization. +# TODO (GH-52141): Rewrite tests so we can drop SDK references at some point. +@get_mapped_ti_count.register(MappedOperator) +@get_mapped_ti_count.register(TaskSDKMappedOperator) +def _(task: MappedOperator | TaskSDKMappedOperator, run_id: str, *, session: Session) -> int: + from airflow.serialization.serialized_objects import BaseSerialization, _ExpandInputRef + + exp_input = task._get_specified_expand_input() + if isinstance(exp_input, _ExpandInputRef): + exp_input = exp_input.deref(task.dag) + # TODO (GH-52141): 'task' here should be scheduler-bound and returns scheduler expand input. 
+ if not hasattr(exp_input, "get_total_map_length"): + if TYPE_CHECKING: + assert isinstance(task.dag, SchedulerDAG) + current_count = ( + _ExpandInputRef( + exp_input.EXPAND_INPUT_TYPE, + BaseSerialization.deserialize(BaseSerialization.serialize(exp_input.value)), + ) + .deref(task.dag) + .get_total_map_length(run_id, session=session) + ) + else: + current_count = exp_input.get_total_map_length(run_id, session=session) + + group = task.get_closest_mapped_task_group() + if group is None: + return current_count + parent_count = get_mapped_ti_count(group, run_id, session=session) + return parent_count * current_count + + +@get_mapped_ti_count.register +def _(group: TaskGroup, run_id: str, *, session: Session) -> int: + """ + Return the number of instances a task in this group should be mapped to at run time. + + This considers both literal and non-literal mapped arguments, and the + result is therefore available when all depended tasks have finished. The + return value should be identical to ``parse_time_mapped_ti_count`` if + all mapped arguments are literal. + + If this group is inside mapped task groups, all the nested counts are + multiplied and accounted. + + :raise NotFullyPopulated: If upstream tasks are not all complete yet. + :return: Total number of mapped TIs this task should have. + """ + from airflow.serialization.serialized_objects import BaseSerialization, _ExpandInputRef + + def iter_mapped_task_group_lengths(group) -> Iterator[int]: + while group is not None: + if isinstance(group, MappedTaskGroup): + exp_input = group._expand_input + # TODO (GH-52141): 'group' here should be scheduler-bound and returns scheduler expand input. + if not hasattr(exp_input, "get_total_map_length"): + if TYPE_CHECKING: + assert isinstance(group.dag, SchedulerDAG) + exp_input = _ExpandInputRef( + exp_input.EXPAND_INPUT_TYPE, + BaseSerialization.deserialize(BaseSerialization.serialize(exp_input.value)), + ).deref(group.dag) + yield exp_input.get_total_map_length(run_id, session=session) + group = group.parent_group + + return functools.reduce(operator.mul, iter_mapped_task_group_lengths(group)) diff --git a/airflow-core/src/airflow/models/renderedtifields.py b/airflow-core/src/airflow/models/renderedtifields.py index c971f391d9e24..9e3fb57cb2816 100644 --- a/airflow-core/src/airflow/models/renderedtifields.py +++ b/airflow-core/src/airflow/models/renderedtifields.py @@ -48,10 +48,10 @@ from sqlalchemy.sql import FromClause from airflow.models.taskinstance import TaskInstance, TaskInstanceKey - from airflow.sdk.types import Operator + from airflow.serialization.serialized_objects import SerializedBaseOperator -def get_serialized_template_fields(task: Operator): +def get_serialized_template_fields(task: SerializedBaseOperator): """ Get and serialize the template fields for a task. 
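With this change, `get_mapped_ti_count` becomes a module-level `functools.singledispatch` function registered per node type (serialized operators, mapped operators, task groups) instead of a singledispatch classmethod on `BaseOperator`, and nested mapped task groups contribute multiplicatively via `functools.reduce(operator.mul, ...)`. A self-contained sketch of that dispatch shape, using toy node classes rather than Airflow's:

```python
import functools
import operator


class Node:
    """Stand-in for a DAG node."""


class PlainTask(Node):
    """Stand-in for an unmapped operator."""


class MappedGroup(Node):
    """Stand-in for a mapped task group; `lengths` holds the map length of
    this group and of every enclosing mapped group."""

    def __init__(self, lengths: list[int]) -> None:
        self.lengths = lengths


@functools.singledispatch
def mapped_ti_count(node: Node, run_id: str) -> int:
    raise NotImplementedError(f"Not implemented for {type(node)}")


@mapped_ti_count.register
def _(node: PlainTask, run_id: str) -> int:
    return 1  # a task outside any mapped group expands to a single TI


@mapped_ti_count.register
def _(node: MappedGroup, run_id: str) -> int:
    # Mirrors iter_mapped_task_group_lengths above: nested counts multiply.
    return functools.reduce(operator.mul, node.lengths, 1)


print(mapped_ti_count(MappedGroup([3, 4]), "manual__2025-01-01"))  # 12
```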
@@ -125,7 +125,7 @@ def __init__(self, ti: TaskInstance, render_templates=True, rendered_fields=None ti.render_templates() if TYPE_CHECKING: - assert ti.task + assert isinstance(ti.task, SerializedBaseOperator) self.task = ti.task if os.environ.get("AIRFLOW_IS_K8S_EXECUTOR_POD", None): diff --git a/airflow-core/src/airflow/models/taskinstance.py b/airflow-core/src/airflow/models/taskinstance.py index 9044817c851c0..1e22967429d23 100644 --- a/airflow-core/src/airflow/models/taskinstance.py +++ b/airflow-core/src/airflow/models/taskinstance.py @@ -24,8 +24,9 @@ import math import operator import os +import uuid from collections import defaultdict -from collections.abc import Collection, Generator, Iterable, Sequence +from collections.abc import Collection, Iterable from datetime import timedelta from functools import cache from typing import TYPE_CHECKING, Any @@ -83,8 +84,7 @@ from airflow.models.taskinstancekey import TaskInstanceKey from airflow.models.taskmap import TaskMap from airflow.models.taskreschedule import TaskReschedule -from airflow.models.xcom import LazyXComSelectSequence, XComModel -from airflow.plugins_manager import integrate_macros_plugins +from airflow.models.xcom import XCOM_RETURN_KEY, LazyXComSelectSequence, XComModel from airflow.settings import task_instance_mutation_hook from airflow.stats import Stats from airflow.ti_deps.dep_context import DepContext @@ -100,7 +100,6 @@ from airflow.utils.span_status import SpanStatus from airflow.utils.sqlalchemy import ExecutorConfigType, ExtendedJSON, UtcDateTime from airflow.utils.state import DagRunState, State, TaskInstanceState -from airflow.utils.xcom import XCOM_RETURN_KEY TR = TaskReschedule @@ -109,6 +108,7 @@ if TYPE_CHECKING: from datetime import datetime + from typing import Literal, TypeAlias import pendulum from sqlalchemy.engine import Connection as SAConnection, Engine @@ -117,19 +117,21 @@ from sqlalchemy.sql.elements import BooleanClauseList from sqlalchemy.sql.expression import ColumnOperators - from airflow.models.baseoperator import BaseOperator from airflow.models.dag import DAG as SchedulerDAG, DagModel from airflow.models.dagrun import DagRun + from airflow.sdk import BaseOperator from airflow.sdk.api.datamodels._generated import AssetProfile - from airflow.sdk.definitions._internal.abstractoperator import Operator, TaskStateChangeCallback from airflow.sdk.definitions.asset import AssetNameRef, AssetUniqueKey, AssetUriRef from airflow.sdk.definitions.dag import DAG + from airflow.sdk.definitions.mappedoperator import MappedOperator from airflow.sdk.definitions.taskgroup import MappedTaskGroup from airflow.sdk.types import RuntimeTaskInstanceProtocol - from airflow.typing_compat import Literal + from airflow.serialization.serialized_objects import SerializedBaseOperator from airflow.utils.context import Context from airflow.utils.task_group import TaskGroup + Operator: TypeAlias = BaseOperator | MappedOperator + PAST_DEPENDS_MET = "past_depends_met" @@ -156,28 +158,6 @@ def _add_log( ) -@contextlib.contextmanager -def set_current_context(context: Context) -> Generator[Context, None, None]: - """ - Set the current execution context to the provided context object. - - This method should be called once per Task execution, before calling operator.execute. 
- """ - from airflow.sdk.definitions._internal.contextmanager import _CURRENT_CONTEXT - - _CURRENT_CONTEXT.append(context) - try: - yield context - finally: - expected_state = _CURRENT_CONTEXT.pop() - if expected_state != context: - log.warning( - "Current context is not equal to the state at context stack. Expected=%s, got=%s", - context, - expected_state, - ) - - def _stop_remaining_tasks(*, task_instance: TaskInstance, task_teardown_map=None, session: Session): """ Stop non-teardown tasks in dag. @@ -412,38 +392,6 @@ def render(key: str, content: str) -> str: return subject, html_content, html_content_err -def _run_finished_callback( - *, - callbacks: None | TaskStateChangeCallback | Sequence[TaskStateChangeCallback], - context: Context, -) -> None: - """ - Run callback after task finishes. - - :param callbacks: callbacks to run - :param context: callbacks context - - :meta private: - """ - if callbacks: - callbacks = callbacks if isinstance(callbacks, Sequence) else [callbacks] - - def get_callback_representation(callback: TaskStateChangeCallback) -> Any: - with contextlib.suppress(AttributeError): - return callback.__name__ - with contextlib.suppress(AttributeError): - return callback.__class__.__name__ - return callback - - for idx, callback in enumerate(callbacks): - callback_repr = get_callback_representation(callback) - log.info("Executing callback at index %d: %s", idx, callback_repr) - try: - callback(context) - except Exception: - log.exception("Error in callback at index %d: %s", idx, callback_repr) - - def _log_state(*, task_instance: TaskInstance, lead_msg: str = "") -> None: """ Log task state. @@ -566,8 +514,7 @@ class TaskInstance(Base, LoggingMixin): _task_display_property_value = Column("task_display_name", String(2000), nullable=True) dag_version_id = Column( - UUIDType(binary=False), - ForeignKey("dag_version.id", ondelete="RESTRICT"), + UUIDType(binary=False), ForeignKey("dag_version.id", ondelete="RESTRICT"), nullable=False ) dag_version = relationship("DagVersion", back_populates="task_instances") @@ -618,7 +565,7 @@ class TaskInstance(Base, LoggingMixin): ) note = association_proxy("task_instance_note", "content", creator=_creator_note) - task: Operator | None = None + task: Operator | SerializedBaseOperator | None = None test_mode: bool = False is_trigger_log_context: bool = False run_as_user: str | None = None @@ -631,11 +578,11 @@ class TaskInstance(Base, LoggingMixin): def __init__( self, - task: Operator, + task: Operator | SerializedBaseOperator, + dag_version_id: UUIDType | uuid.UUID, run_id: str | None = None, state: str | None = None, map_index: int = -1, - dag_version_id: UUIDType | None = None, ): super().__init__() self.dag_id = task.dag_id @@ -645,7 +592,6 @@ def __init__( self.refresh_from_task(task) if TYPE_CHECKING: assert self.task - # init_on_load will config the log self.init_on_load() @@ -675,7 +621,7 @@ def stats_tags(self) -> dict[str, str]: @staticmethod def insert_mapping( - run_id: str, task: Operator, map_index: int, dag_version_id: UUIDType | None + run_id: str, task: Operator, map_index: int, dag_version_id: UUIDType ) -> dict[str, Any]: """ Insert mapping. 
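`TaskInstance.__init__` now requires `dag_version_id` up front rather than accepting it as an optional trailing argument, so every TI row is pinned to a serialized DAG version at creation time. A minimal sketch of the new contract, assuming the DAG is already serialized (`make_ti` is a hypothetical helper; the version lookup mirrors the one `_create_orm_dagrun` performs above):

```python
from airflow.models.dag_version import DagVersion
from airflow.models.taskinstance import TaskInstance


def make_ti(task, run_id, session):
    # Same lookup _create_orm_dagrun performs before building the run.
    dag_version = DagVersion.get_latest_version(task.dag_id, session=session)
    if dag_version is None:
        raise ValueError(f"DAG {task.dag_id} is not serialized yet")
    return TaskInstance(task=task, run_id=run_id, dag_version_id=dag_version.id)
```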
@@ -683,7 +629,7 @@ def insert_mapping( :meta private: """ priority_weight = task.weight_rule.get_weight( - TaskInstance(task=task, run_id=run_id, map_index=map_index) + TaskInstance(task=task, run_id=run_id, map_index=map_index, dag_version_id=dag_version_id) ) return { @@ -738,6 +684,7 @@ def from_runtime_ti(cls, runtime_ti: RuntimeTaskInstanceProtocol) -> TaskInstanc run_id=runtime_ti.run_id, task=runtime_ti.task, # type: ignore[arg-type] map_index=runtime_ti.map_index, + dag_version_id=runtime_ti.dag_version_id, ) if TYPE_CHECKING: @@ -760,6 +707,7 @@ def to_runtime_ti(self, context_from_server) -> RuntimeTaskInstanceProtocol: hostname=self.hostname, _ti_context_from_server=context_from_server, start_date=self.start_date, + dag_version_id=self.dag_version_id, ) return runtime_ti @@ -881,7 +829,11 @@ def refresh_from_db( else: self.state = None - def refresh_from_task(self, task: Operator, pool_override: str | None = None) -> None: + def refresh_from_task( + self, + task: Operator | SerializedBaseOperator, + pool_override: str | None = None, + ) -> None: """ Copy common attributes from the given task. @@ -1074,18 +1026,10 @@ def get_failed_dep_statuses(self, dep_context: DepContext | None = None, session if TYPE_CHECKING: assert isinstance(self.task, BaseOperator) - if not hasattr(self.task, "deps"): - # These deps are not on BaseOperator since they are only needed and evaluated - # in the scheduler and not needed at the Runtime. - from airflow.serialization.serialized_objects import SerializedBaseOperator - - serialized_op = SerializedBaseOperator.deserialize_operator( - SerializedBaseOperator.serialize_operator(self.task) - ) - setattr(self.task, "deps", serialized_op.deps) # type: ignore[union-attr] + from airflow.serialization.serialized_objects import create_scheduler_operator dep_context = dep_context or DepContext() - for dep in dep_context.deps | self.task.deps: + for dep in dep_context.deps | create_scheduler_operator(self.task).deps: for dep_status in dep.get_dep_statuses(self, session, dep_context): self.log.debug( "%s dependency '%s' PASSED: %s, %s", @@ -1595,7 +1539,7 @@ def defer_task(self, exception: TaskDeferred | None, session: Session = NEW_SESS # TODO: TaskSDK add start_trigger_args to SDK definitions if TYPE_CHECKING: - assert self.task is None or isinstance(self.task, BaseOperator) + assert self.task is not None timeout: timedelta | None if exception is not None: @@ -1605,7 +1549,7 @@ def defer_task(self, exception: TaskDeferred | None, session: Session = NEW_SESS timeout = exception.timeout elif self.task is not None and self.task.start_trigger_args is not None: context = self.get_template_context() - start_trigger_args = self.task.expand_start_trigger_args(context=context, session=session) + start_trigger_args = self.task.expand_start_trigger_args(context=context) if start_trigger_args is None: raise TaskDeferralError( "A none 'None' start_trigger_args has been change to 'None' during expandion" @@ -1696,8 +1640,7 @@ def run( def dry_run(self) -> None: """Only Renders Templates for the TI.""" if TYPE_CHECKING: - assert self.task - + assert isinstance(self.task, (BaseOperator, MappedOperator)) self.task = self.task.prepare_for_execution() self.render_templates() if TYPE_CHECKING: @@ -1708,10 +1651,8 @@ def dry_run(self) -> None: def fetch_handle_failure_context( cls, ti: TaskInstance, - error: None | str | BaseException, + error: None | str, test_mode: bool | None = None, - context: Context | None = None, - force_fail: bool = False, *, session: Session, fail_fast: 
bool = False, @@ -1722,8 +1663,6 @@ def fetch_handle_failure_context( :param ti: TaskInstance :param error: if specified, log the specific exception if thrown :param test_mode: doesn't record success or failure in the DB if True - :param context: Jinja2 context - :param force_fail: if True, task does not retry :param session: SQLAlchemy ORM Session :param fail_fast: if True, fail all downstream tasks """ @@ -1745,8 +1684,9 @@ def fetch_handle_failure_context( ti.clear_next_method_args() + context = None # In extreme cases (task instance heartbeat timeout in case of dag with parse error) we might _not_ have a Task. - if context is None and getattr(ti, "task", None): + if getattr(ti, "task", None): context = ti.get_template_context(session) if context is not None: @@ -1766,14 +1706,13 @@ def fetch_handle_failure_context( task: BaseOperator | None = None try: - if getattr(ti, "task", None) and context: - if TYPE_CHECKING: - assert isinstance(ti.task, BaseOperator) - task = ti.task.unmap((context, session)) + if (orig_task := getattr(ti, "task", None)) and context: + # TODO (GH-52141): Move runtime unmap into task runner. + task = orig_task.unmap((context, session)) except Exception: cls.logger().error("Unable to unmap task to determine if we need to send an alert email") - if force_fail or not ti.is_eligible_to_retry(): + if not ti.is_eligible_to_retry(): ti.state = TaskInstanceState.FAILED email_for_state = operator.attrgetter("email_on_failure") callbacks = task.on_failure_callback if task else None @@ -1817,20 +1756,16 @@ def save_to_db(ti: TaskInstance, session: Session = NEW_SESSION): @provide_session def handle_failure( self, - error: None | str | BaseException, + error: None | str, test_mode: bool | None = None, - context: Context | None = None, - force_fail: bool = False, session: Session = NEW_SESSION, ) -> None: """ Handle Failure for a task instance. :param error: if specified, log the specific exception if thrown - :param session: SQLAlchemy ORM Session :param test_mode: doesn't record success or failure in the DB if True - :param context: Jinja2 context - :param force_fail: if True, task does not retry + :param session: SQLAlchemy ORM Session """ if TYPE_CHECKING: assert self.task @@ -1845,13 +1780,11 @@ def handle_failure( ti=self, # type: ignore[arg-type] error=error, test_mode=test_mode, - context=context, - force_fail=force_fail, session=session, fail_fast=fail_fast, ) - _log_state(task_instance=self, lead_msg="Immediate failure requested. " if force_fail else "") + _log_state(task_instance=self) if ( failure_context["task"] and failure_context["email_for_state"](failure_context["task"]) @@ -1862,12 +1795,6 @@ def handle_failure( except Exception: log.exception("Failed to send email to: %s", failure_context["task"].email) - if failure_context["callbacks"] and failure_context["context"]: - _run_finished_callback( - callbacks=failure_context["callbacks"], - context=failure_context["context"], - ) - if not test_mode: TaskInstance.save_to_db(failure_context["ti"], session) @@ -1898,22 +1825,22 @@ def get_template_context( :param session: SQLAlchemy ORM Session :param ignore_param_exceptions: flag to suppress value exceptions while initializing the ParamsDict """ - if TYPE_CHECKING: - assert self.task - assert isinstance(self.task.dag, SchedulerDAG) - # Do not use provide_session here -- it expunges everything on exit! 
if not session: session = settings.Session() - from airflow import macros - from airflow.models.abstractoperator import NotMapped - from airflow.models.baseoperator import BaseOperator + if TYPE_CHECKING: + assert session + assert isinstance(self.task, (BaseOperator, MappedOperator)) + assert self.task.dag + + from airflow.models.mappedoperator import get_mapped_ti_count from airflow.sdk.api.datamodels._generated import ( DagRun as DagRunSDK, PrevSuccessfulDagRunResponse, TIRunContext, ) + from airflow.sdk.definitions._internal.abstractoperator import NotMapped from airflow.sdk.definitions.param import process_params from airflow.sdk.execution_time.context import InletEventsAccessors from airflow.utils.context import ( @@ -1922,15 +1849,6 @@ def get_template_context( VariableAccessor, ) - integrate_macros_plugins() - - task = self.task - if TYPE_CHECKING: - assert self.task - assert task - assert task.dag - assert session - def _get_dagrun(session: Session) -> DagRun: dag_run = self.get_dagrun(session) if dag_run in session: @@ -1948,11 +1866,11 @@ def _get_dagrun(session: Session) -> DagRun: return dag_run return session.merge(dag_run, load=False) + task = self.task + dag = self.task.dag dag_run = _get_dagrun(session) - validated_params = process_params( - self.task.dag, task, dag_run.conf, suppress_exception=ignore_param_exceptions - ) + validated_params = process_params(dag, task, dag_run.conf, suppress_exception=ignore_param_exceptions) ti_context_from_server = TIRunContext( dag_run=DagRunSDK.model_validate(dag_run, from_attributes=True), max_tries=self.max_tries, @@ -1999,7 +1917,6 @@ def get_triggering_events() -> dict[str, list[AssetEvent]]: { "outlet_events": OutletEventAccessors(), "inlet_events": InletEventsAccessors(task.inlets), - "macros": macros, "params": validated_params, "prev_data_interval_start_success": get_prev_data_interval_start_success(), "prev_data_interval_end_success": get_prev_data_interval_end_success(), @@ -2019,9 +1936,7 @@ def get_triggering_events() -> dict[str, list[AssetEvent]]: ) try: - expanded_ti_count: int | None = BaseOperator.get_mapped_ti_count( - task, self.run_id, session=session - ) + expanded_ti_count: int | None = get_mapped_ti_count(task, self.run_id, session=session) context["expanded_ti_count"] = expanded_ti_count if expanded_ti_count: setattr( @@ -2060,9 +1975,9 @@ def render_templates( ti = context["ti"] if TYPE_CHECKING: - assert original_task - assert self.task - assert ti.task + assert isinstance(original_task, (BaseOperator, MappedOperator)) + assert isinstance(self.task, (BaseOperator, MappedOperator)) + assert isinstance(ti.task, (BaseOperator, MappedOperator)) # If self.task is mapped, this call replaces self.task to point to the # unmapped BaseOperator created by this function! This is because the @@ -2380,10 +2295,10 @@ def tg2(inp): :return: Specific map index or map indexes to pull, or ``None`` if we want to "whole" return value (i.e. no mapped task groups involved). """ - from airflow.models.baseoperator import BaseOperator + from airflow.models.mappedoperator import get_mapped_ti_count if TYPE_CHECKING: - assert self.task + assert isinstance(self.task, (BaseOperator, MappedOperator)) # This value should never be None since we already know the current task # is in a mapped task group, and should have been expanded, despite that, @@ -2405,7 +2320,7 @@ def tg2(inp): # should use a "partial" value. Let's break down the mapped ti count # between the ancestor and further expansion happened inside it. 
- ancestor_ti_count = BaseOperator.get_mapped_ti_count(common_ancestor, self.run_id, session=session) + ancestor_ti_count = get_mapped_ti_count(common_ancestor, self.run_id, session=session) ancestor_map_index = self.map_index * ancestor_ti_count // ti_count # If the task is NOT further expanded inside the common ancestor, we diff --git a/airflow-core/src/airflow/models/taskmap.py b/airflow-core/src/airflow/models/taskmap.py index f0fd4c0231b70..df2107619008d 100644 --- a/airflow-core/src/airflow/models/taskmap.py +++ b/airflow-core/src/airflow/models/taskmap.py @@ -130,20 +130,21 @@ def expand_mapped_task(cls, task, run_id: str, *, session: Session) -> tuple[Seq :return: The newly created mapped task instances (if any) in ascending order by map index, and the maximum map index value. """ - from airflow.models.baseoperator import BaseOperator as DBBaseOperator from airflow.models.expandinput import NotFullyPopulated + from airflow.models.mappedoperator import get_mapped_ti_count from airflow.models.taskinstance import TaskInstance from airflow.sdk.bases.operator import BaseOperator from airflow.sdk.definitions.mappedoperator import MappedOperator + from airflow.serialization.serialized_objects import SerializedBaseOperator from airflow.settings import task_instance_mutation_hook - if not isinstance(task, (BaseOperator, MappedOperator)): + if not isinstance(task, (BaseOperator, MappedOperator, SerializedBaseOperator)): raise RuntimeError( f"cannot expand unrecognized operator type {type(task).__module__}.{type(task).__name__}" ) try: - total_length: int | None = DBBaseOperator.get_mapped_ti_count(task, run_id, session=session) + total_length: int | None = get_mapped_ti_count(task, run_id, session=session) except NotFullyPopulated as e: if not task.dag or not task.dag.partial: task.log.error( diff --git a/airflow-core/src/airflow/models/taskmixin.py b/airflow-core/src/airflow/models/taskmixin.py index 7aa8f63ba3c9d..61494c70f19f2 100644 --- a/airflow-core/src/airflow/models/taskmixin.py +++ b/airflow-core/src/airflow/models/taskmixin.py @@ -19,7 +19,7 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from airflow.typing_compat import TypeAlias + from typing import TypeAlias import airflow.sdk.definitions._internal.mixins import airflow.sdk.definitions._internal.node diff --git a/airflow-core/src/airflow/models/trigger.py b/airflow-core/src/airflow/models/trigger.py index 94d8360edf9e7..a4c54f8597436 100644 --- a/airflow-core/src/airflow/models/trigger.py +++ b/airflow-core/src/airflow/models/trigger.py @@ -25,14 +25,14 @@ from typing import TYPE_CHECKING, Any from sqlalchemy import Column, Integer, String, Text, delete, func, or_, select, update -from sqlalchemy.orm import relationship, selectinload +from sqlalchemy.orm import Session, relationship, selectinload from sqlalchemy.sql.functions import coalesce from airflow.assets.manager import AssetManager from airflow.models.asset import asset_trigger_association_table from airflow.models.base import Base from airflow.models.taskinstance import TaskInstance -from airflow.triggers import base as events +from airflow.triggers.base import BaseTaskEndEvent from airflow.utils import timezone from airflow.utils.retries import run_with_db_retries from airflow.utils.session import NEW_SESSION, provide_session @@ -40,10 +40,9 @@ from airflow.utils.state import TaskInstanceState if TYPE_CHECKING: - from sqlalchemy.orm import Session from sqlalchemy.sql import Select - from airflow.triggers.base import BaseTrigger + from airflow.triggers.base 
import BaseTrigger, TriggerEvent TRIGGER_FAIL_REPR = "__fail__" """String value to represent trigger failure. @@ -230,7 +229,7 @@ def clean_unused(cls, session: Session = NEW_SESSION) -> None: @classmethod @provide_session - def submit_event(cls, trigger_id, event: events.TriggerEvent, session: Session = NEW_SESSION) -> None: + def submit_event(cls, trigger_id, event: TriggerEvent, session: Session = NEW_SESSION) -> None: """ Fire an event. @@ -373,7 +372,7 @@ def get_sorted_triggers(cls, capacity: int, alive_triggerer_ids: list[int] | Sel @singledispatch -def handle_event_submit(event: events.TriggerEvent, *, task_instance: TaskInstance, session: Session) -> None: +def handle_event_submit(event: TriggerEvent, *, task_instance: TaskInstance, session: Session) -> None: """ Handle the submit event for a given task instance. @@ -404,10 +403,8 @@ def handle_event_submit(event: events.TriggerEvent, *, task_instance: TaskInstan session.flush() -@handle_event_submit.register(events.BaseTaskEndEvent) -def _process_BaseTaskEndEvent( - event: events.BaseTaskEndEvent, *, task_instance: TaskInstance, session: Session -) -> None: +@handle_event_submit.register +def _(event: BaseTaskEndEvent, *, task_instance: TaskInstance, session: Session) -> None: """ Submit event for the given task instance. diff --git a/airflow-core/src/airflow/models/xcom.py b/airflow-core/src/airflow/models/xcom.py index 5efb9414d1a78..badb86bd43de5 100644 --- a/airflow-core/src/airflow/models/xcom.py +++ b/airflow-core/src/airflow/models/xcom.py @@ -47,13 +47,6 @@ from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.sqlalchemy import UtcDateTime -# XCom constants below are needed for providers backward compatibility, -# which should import the constants directly after apache-airflow>=2.6.0 -from airflow.utils.xcom import ( - MAX_XCOM_SIZE, # noqa: F401 - XCOM_RETURN_KEY, -) - log = logging.getLogger(__name__) if TYPE_CHECKING: @@ -62,6 +55,9 @@ from sqlalchemy.sql.expression import Select, TextClause +XCOM_RETURN_KEY = "return_value" + + class XComModel(TaskInstanceDependencies): """XCom model class. 
Contains table and some utilities."""
diff --git a/airflow-core/src/airflow/models/xcom_arg.py b/airflow-core/src/airflow/models/xcom_arg.py
index cfda9295cec26..1109f03bb1f99 100644
--- a/airflow-core/src/airflow/models/xcom_arg.py
+++ b/airflow-core/src/airflow/models/xcom_arg.py
@@ -25,6 +25,7 @@
 from sqlalchemy import func, or_, select
 from sqlalchemy.orm import Session
 
+from airflow.models.xcom import XCOM_RETURN_KEY
 from airflow.sdk.definitions._internal.types import ArgNotSet
 from airflow.sdk.definitions.mappedoperator import MappedOperator
 from airflow.sdk.definitions.xcom_arg import (
@@ -33,7 +34,6 @@
 from airflow.utils.db import exists_query
 from airflow.utils.state import State
 from airflow.utils.types import NOTSET
-from airflow.utils.xcom import XCOM_RETURN_KEY
 
 __all__ = ["XComArg", "get_task_map_length"]
 
diff --git a/airflow-core/src/airflow/plugins_manager.py b/airflow-core/src/airflow/plugins_manager.py
index 2b0b324e9e352..d750ff7c405cd 100644
--- a/airflow-core/src/airflow/plugins_manager.py
+++ b/airflow-core/src/airflow/plugins_manager.py
@@ -387,12 +387,46 @@ def initialize_ui_plugins():
 
     log.debug("Initialize UI plugin")
 
+    seen_url_route = {}
     external_views = []
     react_apps = []
 
     for plugin in plugins:
-        external_views.extend(plugin.external_views)
-        react_apps.extend(plugin.react_apps)
+        for external_view in list(plugin.external_views):
+            url_route = external_view["url_route"]
+            if url_route is not None and url_route in seen_url_route:
+                log.warning(
+                    "Plugin '%s' has an external view with a URL route '%s' "
+                    "that conflicts with another plugin '%s'. The view will not be loaded.",
+                    plugin.name,
+                    url_route,
+                    seen_url_route[url_route],
+                )
+                # Iterate over a copy and mutate the plugin's external views in place,
+                # because some functions still access the plugin's external views and not the
+                # global `external_views` variable. (get_plugin_info, for example)
+                plugin.external_views.remove(external_view)
+                continue
+            external_views.append(external_view)
+            seen_url_route[url_route] = plugin.name
+
+        for react_app in list(plugin.react_apps):
+            url_route = react_app["url_route"]
+            if url_route is not None and url_route in seen_url_route:
+                log.warning(
+                    "Plugin '%s' has a React App with a URL route '%s' "
+                    "that conflicts with another plugin '%s'. The React App will not be loaded.",
+                    plugin.name,
+                    url_route,
+                    seen_url_route[url_route],
+                )
+                # Iterate over a copy and mutate the plugin's React Apps in place,
+                # because some functions still access the plugin's React Apps and not the
+                # global `react_apps` variable.
(get_plugin_info, for example) + plugin.react_apps.remove(react_app) + continue + react_apps.append(react_app) + seen_url_route[url_route] = plugin.name def initialize_flask_plugins(): diff --git a/airflow-core/src/airflow/policies.py b/airflow-core/src/airflow/policies.py index 6e995733ca0d1..933ccaa24522c 100644 --- a/airflow-core/src/airflow/policies.py +++ b/airflow-core/src/airflow/policies.py @@ -27,9 +27,9 @@ __all__: list[str] = ["hookimpl"] if TYPE_CHECKING: - from airflow.models.baseoperator import BaseOperator from airflow.models.dag import DAG from airflow.models.taskinstance import TaskInstance + from airflow.serialization.serialized_objects import SerializedBaseOperator as BaseOperator @local_settings_hookspec diff --git a/airflow-core/src/airflow/providers_manager.py b/airflow-core/src/airflow/providers_manager.py index eba323f869d53..9a4ba30397ab7 100644 --- a/airflow-core/src/airflow/providers_manager.py +++ b/airflow-core/src/airflow/providers_manager.py @@ -31,12 +31,11 @@ from functools import wraps from importlib.resources import files as resource_files from time import perf_counter -from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar +from typing import TYPE_CHECKING, Any, NamedTuple, ParamSpec, TypeVar from packaging.utils import canonicalize_name from airflow.exceptions import AirflowOptionalProviderFeatureException -from airflow.typing_compat import ParamSpec from airflow.utils.entry_points import entry_points_with_dist from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.module_loading import import_string diff --git a/airflow-core/src/airflow/serialization/serde.py b/airflow-core/src/airflow/serialization/serde.py index b145607f4ccf7..56faa78b3afa4 100644 --- a/airflow-core/src/airflow/serialization/serde.py +++ b/airflow-core/src/airflow/serialization/serde.py @@ -284,7 +284,12 @@ def deserialize(o: T | None, full=True, type_hint: Any = None) -> object: class_version, ) - return cls(**deserialize(value)) + deserialize_value = deserialize(value) + if not isinstance(deserialize_value, dict): + raise TypeError( + f"deserialized value for {classname} is not a dict, got {type(deserialize_value)}" + ) + return cls(**deserialize_value) # type: ignore[operator] # no deserializer available raise TypeError(f"No deserializer found for {classname}") diff --git a/airflow-core/src/airflow/serialization/serialized_objects.py b/airflow-core/src/airflow/serialization/serialized_objects.py index fb0d85d92f288..0160ea5bf28eb 100644 --- a/airflow-core/src/airflow/serialization/serialized_objects.py +++ b/airflow-core/src/airflow/serialization/serialized_objects.py @@ -26,14 +26,15 @@ import itertools import logging import weakref -from collections.abc import Collection, Generator, Iterable, Mapping +from collections.abc import Collection, Generator, Iterable, Iterator, Mapping, Sequence from functools import cache, cached_property from inspect import signature from textwrap import dedent -from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, TypeVar, Union, cast, overload import attrs import lazy_object_proxy +import methodtools import pydantic from dateutil import relativedelta from pendulum.tz.timezone import FixedTimezone, Timezone @@ -41,7 +42,6 @@ from airflow import macros from airflow.callbacks.callback_requests import DagCallbackRequest, TaskCallbackRequest from airflow.exceptions import AirflowException, SerializationError, TaskDeferred -from 
airflow.models.baseoperator import BaseOperator from airflow.models.connection import Connection from airflow.models.dag import DAG, _get_model_data_interval from airflow.models.expandinput import ( @@ -50,25 +50,22 @@ from airflow.models.taskinstancekey import TaskInstanceKey from airflow.models.xcom import XComModel from airflow.models.xcom_arg import SchedulerXComArg, deserialize_xcom_arg -from airflow.sdk.bases.operator import BaseOperator as TaskSDKBaseOperator +from airflow.sdk import Asset, AssetAlias, AssetAll, AssetAny, AssetWatcher, BaseOperator, XComArg +from airflow.sdk.bases.operator import OPERATOR_DEFAULTS # TODO: Copy this into the scheduler? from airflow.sdk.definitions._internal.expandinput import EXPAND_INPUT_EMPTY +from airflow.sdk.definitions._internal.node import DAGNode from airflow.sdk.definitions.asset import ( - Asset, - AssetAlias, AssetAliasEvent, AssetAliasUniqueKey, - AssetAll, - AssetAny, AssetRef, AssetUniqueKey, - AssetWatcher, BaseAsset, ) from airflow.sdk.definitions.deadline import DeadlineAlert from airflow.sdk.definitions.mappedoperator import MappedOperator from airflow.sdk.definitions.param import Param, ParamsDict from airflow.sdk.definitions.taskgroup import MappedTaskGroup, TaskGroup -from airflow.sdk.definitions.xcom_arg import XComArg, serialize_xcom_arg +from airflow.sdk.definitions.xcom_arg import serialize_xcom_arg from airflow.sdk.execution_time.context import OutletEventAccessor, OutletEventAccessors from airflow.serialization.dag_dependency import DagDependency from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding @@ -80,6 +77,11 @@ airflow_priority_weight_strategies, airflow_priority_weight_strategies_classes, ) +from airflow.ti_deps.deps.mapped_task_upstream_dep import MappedTaskUpstreamDep +from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep +from airflow.ti_deps.deps.not_previously_skipped_dep import NotPreviouslySkippedDep +from airflow.ti_deps.deps.prev_dagrun_dep import PrevDagrunDep +from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep from airflow.triggers.base import BaseTrigger, StartTriggerArgs from airflow.utils.code_utils import get_python_source from airflow.utils.context import ( @@ -98,15 +100,19 @@ if TYPE_CHECKING: from inspect import Parameter + from sqlalchemy.orm import Session + from airflow.models import DagRun from airflow.models.expandinput import SchedulerExpandInput + from airflow.models.mappedoperator import MappedOperator as SchedulerMappedOperator from airflow.models.taskinstance import TaskInstance - from airflow.sdk import BaseOperatorLink - from airflow.sdk.definitions._internal.node import DAGNode + from airflow.sdk import DAG as SdkDag, BaseOperatorLink from airflow.sdk.types import Operator from airflow.serialization.json_schema import Validator from airflow.timetables.base import DagRunInfo, DataInterval, Timetable from airflow.triggers.base import BaseEventTrigger + from airflow.typing_compat import Self + from airflow.utils.trigger_rule import TriggerRule HAS_KUBERNETES: bool try: @@ -116,6 +122,16 @@ except ImportError: pass +DEFAULT_OPERATOR_DEPS = frozenset( + ( + NotInRetryPeriodDep(), + PrevDagrunDep(), + TriggerRuleDep(), + NotPreviouslySkippedDep(), + MappedTaskUpstreamDep(), + ) +) + log = logging.getLogger(__name__) @@ -592,12 +608,12 @@ class BaseSerialization: SERIALIZER_VERSION = 2 @classmethod - def to_json(cls, var: DAG | BaseOperator | dict | list | set | tuple) -> str: + def to_json(cls, var: DAG | SerializedBaseOperator | dict 
| list | set | tuple) -> str: """Stringify DAGs and operators contained by var and returns a JSON string of var.""" return json.dumps(cls.to_dict(var), ensure_ascii=True) @classmethod - def to_dict(cls, var: DAG | BaseOperator | dict | list | set | tuple) -> dict: + def to_dict(cls, var: DAG | SerializedBaseOperator | dict | list | set | tuple) -> dict: """Stringify DAGs and operators contained by var and returns a dict of var.""" # Don't call on this class directly - only SerializedDAG or # SerializedBaseOperator should be used as the "entrypoint" @@ -652,7 +668,7 @@ def _is_excluded(cls, var: Any, attrname: str, instance: Any) -> bool: @classmethod def serialize_to_json( cls, - object_to_serialize: TaskSDKBaseOperator | MappedOperator | DAG, + object_to_serialize: BaseOperator | MappedOperator | SerializedBaseOperator | DAG, decorated_fields: set, ) -> dict[str, Any]: """Serialize an object to JSON.""" @@ -738,7 +754,7 @@ def serialize( return var.to_dict() elif isinstance(var, MappedOperator): return cls._encode(SerializedBaseOperator.serialize_mapped_operator(var), type_=DAT.OP) - elif isinstance(var, TaskSDKBaseOperator): + elif isinstance(var, (BaseOperator, SerializedBaseOperator)): var._needs_expansion = var.get_needs_expansion() return cls._encode(SerializedBaseOperator.serialize_operator(var), type_=DAT.OP) elif isinstance(var, cls._datetime_types): @@ -1070,6 +1086,10 @@ def detect_task_dependencies(task: Operator) -> list[DagDependency]: from airflow.providers.standard.operators.trigger_dagrun import TriggerDagRunOperator from airflow.providers.standard.sensors.external_task import ExternalTaskSensor + # TODO (GH-52141): Separate MappedOperator implementation to get rid of this. + if TYPE_CHECKING: + assert isinstance(task.operator_class, type) + deps = [] if isinstance(task, TriggerDagRunOperator): deps.append( @@ -1083,7 +1103,7 @@ def detect_task_dependencies(task: Operator) -> list[DagDependency]: ) elif ( isinstance(task, MappedOperator) - and issubclass(cast("type[BaseOperator]", task.operator_class), TriggerDagRunOperator) + and issubclass(task.operator_class, TriggerDagRunOperator) and "trigger_dag_id" in task.partial_kwargs ): deps.append( @@ -1107,7 +1127,7 @@ def detect_task_dependencies(task: Operator) -> list[DagDependency]: ) elif ( isinstance(task, MappedOperator) - and issubclass(cast("type[BaseOperator]", task.operator_class), ExternalTaskSensor) + and issubclass(task.operator_class, ExternalTaskSensor) and "external_dag_id" in task.partial_kwargs ): deps.append( @@ -1144,7 +1164,8 @@ def detect_dag_dependencies(dag: DAG | None) -> Iterable[DagDependency]: yield from dag.timetable.asset_condition.iter_dag_dependencies(source="", target=dag.dag_id) -class SerializedBaseOperator(BaseOperator, BaseSerialization): +# TODO (GH-52141): Duplicate DAGNode in the scheduler. +class SerializedBaseOperator(DAGNode, BaseSerialization): """ A JSON serializable representation of operator. 
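With SerializedBaseOperator rebased from BaseOperator onto DAGNode, the class now supplies its own scheduling deps (DEFAULT_OPERATOR_DEPS above) and a keyword-only constructor (shown in the next hunk). A minimal construction sketch, assuming that constructor; the task id is hypothetical:

    from airflow.serialization.serialized_objects import (
        DEFAULT_OPERATOR_DEPS,
        SerializedBaseOperator,
    )

    # Hypothetical task id; other attributes are filled in from
    # _CONSTRUCTOR_PARAMS and OPERATOR_DEFAULTS by __init__.
    op = SerializedBaseOperator(task_id="extract")
    assert op.node_id == "extract"           # DAGNode identity comes from task_id
    assert op.deps is DEFAULT_OPERATOR_DEPS  # scheduler-side deps, no longer inherited from the SDK
    assert not op.is_mapped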
@@ -1163,15 +1184,52 @@ class SerializedBaseOperator(BaseOperator, BaseSerialization): _CONSTRUCTOR_PARAMS = { k: v.default - for k, v in itertools.chain( - signature(BaseOperator.__init__).parameters.items(), - signature(TaskSDKBaseOperator.__init__).parameters.items(), - ) + for k, v in signature(BaseOperator.__init__).parameters.items() if v.default is not v.empty } - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + _can_skip_downstream: bool + _is_empty: bool + _needs_expansion: bool + _task_display_name: str | None + depends_on_past: bool + execution_timeout: datetime.timedelta | None + executor: str | None + executor_config: dict | None + ignore_first_depends_on_past: bool + inlets: Sequence + is_setup: bool + is_teardown: bool + on_execute_callback: Sequence + on_success_callback: Sequence + outlets: Sequence + pool: str + pool_slots: int + priority_weight: int + queue: str + retries: int | None + run_as_user: str | None + start_from_trigger: bool + start_trigger_args: StartTriggerArgs + trigger_rule: TriggerRule + wait_for_downstream: bool + weight_rule: PriorityWeightStrategy + + is_mapped = False + + def __init__( + self, + *, + task_id: str, + params: Mapping[str, Any] | None = None, + _airflow_from_mapped: bool = False, + ) -> None: + super().__init__() + self.__dict__.update(self._CONSTRUCTOR_PARAMS) + self.__dict__.update(OPERATOR_DEFAULTS) + self._BaseOperator__from_mapped = _airflow_from_mapped + self.task_id = task_id + self.params = ParamsDict(params) # task_type is used by UI to display the correct class type, because UI only # receives BaseOperator from deserialized DAGs. self._task_type = "BaseOperator" @@ -1181,26 +1239,30 @@ def __init__(self, *args, **kwargs): self.template_ext = BaseOperator.template_ext self.template_fields = BaseOperator.template_fields self.operator_extra_links = BaseOperator.operator_extra_links - self._operator_name = None + self.deps = DEFAULT_OPERATOR_DEPS + self._operator_name: str | None = None - @cached_property - def operator_extra_link_dict(self) -> dict[str, BaseOperatorLink]: - """Returns dictionary of all extra links for the operator.""" - op_extra_links_from_plugin: dict[str, Any] = {} - from airflow import plugins_manager + @property + def node_id(self) -> str: + return self.task_id - plugins_manager.initialize_extra_operators_links_plugins() - if plugins_manager.operator_extra_links is None: - raise AirflowException("Can't load operators") - for ope in plugins_manager.operator_extra_links: - if ope.operators and self.operator_class in ope.operators: - op_extra_links_from_plugin.update({ope.name: ope}) + def get_dag(self) -> SdkDag | None: + return self.dag - operator_extra_links_all = {link.name: link for link in self.operator_extra_links} - # Extra links defined in Plugins overrides operator links defined in operator - operator_extra_links_all.update(op_extra_links_from_plugin) + @property + def roots(self) -> Sequence[DAGNode]: + """Required by DAGNode.""" + return [self] - return operator_extra_links_all + @property + def leaves(self) -> Sequence[DAGNode]: + """Required by DAGNode.""" + return [self] + + @cached_property + def operator_extra_link_dict(self) -> dict[str, BaseOperatorLink]: + """Returns dictionary of all extra links for the operator.""" + return {link.name: link for link in self.operator_extra_links} @cached_property def global_operator_extra_link_dict(self) -> dict[str, Any]: @@ -1254,6 +1316,22 @@ def operator_name(self) -> str: def operator_name(self, operator_name: str): 
self._operator_name = operator_name + @property + def task_display_name(self) -> str: + return self._task_display_name or self.task_id + + # TODO (GH-52141): For compatibility... can we just rename this? + @property + def on_failure_fail_dagrun(self): + return self._on_failure_fail_dagrun + + @on_failure_fail_dagrun.setter + def on_failure_fail_dagrun(self, value): + self._on_failure_fail_dagrun = value + + def expand_start_trigger_args(self, *, context: Context) -> StartTriggerArgs | None: + return self.start_trigger_args + @classmethod def serialize_mapped_operator(cls, op: MappedOperator) -> dict[str, Any]: serialized_op = cls._serialize_node(op) @@ -1281,11 +1359,11 @@ def serialize_mapped_operator(cls, op: MappedOperator) -> dict[str, Any]: return serialized_op @classmethod - def serialize_operator(cls, op: TaskSDKBaseOperator | MappedOperator) -> dict[str, Any]: + def serialize_operator(cls, op: BaseOperator | MappedOperator | SerializedBaseOperator) -> dict[str, Any]: return cls._serialize_node(op) @classmethod - def _serialize_node(cls, op: TaskSDKBaseOperator | MappedOperator) -> dict[str, Any]: + def _serialize_node(cls, op: BaseOperator | MappedOperator | SerializedBaseOperator) -> dict[str, Any]: """Serialize operator into a JSON object.""" serialize_op = cls.serialize_to_json(op, cls._decorated_fields) @@ -1345,7 +1423,11 @@ def _serialize_node(cls, op: TaskSDKBaseOperator | MappedOperator) -> dict[str, return serialize_op @classmethod - def populate_operator(cls, op: Operator, encoded_op: dict[str, Any]) -> None: + def populate_operator( + cls, + op: SchedulerMappedOperator | SerializedBaseOperator, + encoded_op: dict[str, Any], + ) -> None: """ Populate operator attributes with serialized values. @@ -1509,23 +1591,24 @@ def set_task_dag_references(task: Operator, dag: DAG) -> None: dag.task_dict[task_id].upstream_task_ids.add(task.task_id) @classmethod - def deserialize_operator(cls, encoded_op: dict[str, Any]) -> Operator: + def deserialize_operator( + cls, + encoded_op: dict[str, Any], + ) -> SchedulerMappedOperator | SerializedBaseOperator: """Deserializes an operator from a JSON object.""" - op: Operator + op: SchedulerMappedOperator | SerializedBaseOperator if encoded_op.get("_is_mapped", False): # Most of these will be loaded later, these are just some stand-ins. 
-            op_data = {
-                k: v for k, v in encoded_op.items() if k in TaskSDKBaseOperator.get_serialized_fields()
-            }
+            op_data = {k: v for k, v in encoded_op.items() if k in BaseOperator.get_serialized_fields()}

-            from airflow.models.mappedoperator import MappedOperator as MappedOperatorWithDB
+            from airflow.models.mappedoperator import MappedOperator as SchedulerMappedOperator

             try:
                 operator_name = encoded_op["_operator_name"]
             except KeyError:
                 operator_name = encoded_op["task_type"]

-            op = MappedOperatorWithDB(
+            op = SchedulerMappedOperator(
                 operator_class=op_data,
                 expand_input=EXPAND_INPUT_EMPTY,
                 partial_kwargs={},
@@ -1639,6 +1722,141 @@ def serialize(cls, var: Any, *, strict: bool = False) -> Any:
     def deserialize(cls, encoded_var: Any) -> Any:
         return BaseSerialization.deserialize(encoded_var=encoded_var)

+    def serialize_for_task_group(self) -> tuple[DAT, Any]:
+        """Serialize; required by DAGNode."""
+        return DAT.OP, self.task_id
+
+    @property
+    def inherits_from_empty_operator(self) -> bool:
+        return self._is_empty
+
+    @property
+    def inherits_from_skipmixin(self) -> bool:
+        return self._can_skip_downstream
+
+    def expand_start_from_trigger(self, *, context: Context, session: Session) -> bool:
+        """
+        Get the start_from_trigger value of the current abstract operator.
+
+        Since a BaseOperator is not mapped to begin with, this simply returns
+        the original value of start_from_trigger.
+
+        :meta private:
+        """
+        return self.start_from_trigger
+
+    def get_serialized_fields(self):
+        return BaseOperator.get_serialized_fields()
+
+    def unmap(self, resolve: None) -> Self:
+        return self
+
+    def _iter_all_mapped_downstreams(self) -> Iterator[MappedOperator | MappedTaskGroup]:
+        """
+        Return mapped nodes that are direct dependants of the current task.
+
+        For now, this walks the entire DAG to find mapped nodes that have this
+        current task as an upstream. We cannot use ``downstream_list`` since it
+        only contains operators, not task groups. In the future, we should
+        provide a way to record all of a DAG node's downstream nodes instead.
+
+        Note that this does not guarantee the returned tasks actually use the
+        current task for task mapping, but only checks that those tasks are
+        mapped operators, and are downstream of the current task.
+
+        To get a list of tasks that use the current task for task mapping, use
+        :meth:`iter_mapped_dependants` instead.
+        """
+
+        def _walk_group(group: TaskGroup) -> Iterable[tuple[str, DAGNode]]:
+            """
+            Recursively walk children in a task group.
+
+            This yields all direct children (including both tasks and task
+            groups), and all children of any task groups.
+            """
+            for key, child in group.children.items():
+                yield key, child
+                if isinstance(child, TaskGroup):
+                    yield from _walk_group(child)
+
+        if not (dag := self.dag):
+            raise RuntimeError("Cannot check for mapped dependants when not attached to a DAG")
+        for key, child in _walk_group(dag.task_group):
+            if key == self.node_id:
+                continue
+            if not isinstance(child, MappedOperator | MappedTaskGroup):
+                continue
+            if self.node_id in child.upstream_task_ids:
+                yield child
+
+    def iter_mapped_dependants(self) -> Iterator[MappedOperator | MappedTaskGroup]:
+        """
+        Return mapped nodes that depend on the current task for expansion.
+
+        For now, this walks the entire DAG to find mapped nodes that have this
+        current task as an upstream. We cannot use ``downstream_list`` since it
+        only contains operators, not task groups. In the future, we should
+        provide a way to record all of a DAG node's downstream nodes instead.
+ """ + return ( + downstream + for downstream in self._iter_all_mapped_downstreams() + if any(p.node_id == self.node_id for p in downstream.iter_mapped_dependencies()) + ) + + # TODO (GH-52141): Copied from sdk. Find a better place for this to live in. + def iter_mapped_task_groups(self) -> Iterator[MappedTaskGroup]: + """ + Return mapped task groups this task belongs to. + + Groups are returned from the innermost to the outmost. + + :meta private: + """ + if (group := self.task_group) is None: + return + yield from group.iter_mapped_task_groups() + + # TODO (GH-52141): Copied from sdk. Find a better place for this to live in. + def get_closest_mapped_task_group(self) -> MappedTaskGroup | None: + """ + Get the mapped task group "closest" to this task in the DAG. + + :meta private: + """ + return next(self.iter_mapped_task_groups(), None) + + # TODO (GH-52141): Copied from sdk. Find a better place for this to live in. + def get_needs_expansion(self) -> bool: + """ + Return true if the task is MappedOperator or is in a mapped task group. + + :meta private: + """ + return self._needs_expansion + + # TODO (GH-52141): Copied from sdk. Find a better place for this to live in. + @methodtools.lru_cache(maxsize=1) + def get_parse_time_mapped_ti_count(self) -> int: + """ + Return the number of mapped task instances that can be created on DAG run creation. + + This only considers literal mapped arguments, and would return *None* + when any non-literal values are used for mapping. + + :raise NotFullyPopulated: If non-literal mapped arguments are encountered. + :raise NotMapped: If the operator is neither mapped, nor has any parent + mapped task groups. + :return: Total number of mapped TIs this task should have. + """ + from airflow.sdk.definitions._internal.abstractoperator import NotMapped + + group = self.get_closest_mapped_task_group() + if group is None: + raise NotMapped() + return group.get_parse_time_mapped_ti_count() + class SerializedDAG(DAG, BaseSerialization): """ @@ -2140,7 +2358,7 @@ def get_task_assets( ports_getter = self._get_mapped_operator_ports else: ports_getter = self._get_base_operator_ports - directions = ("inlets",) if inlets else () + directions: tuple[str, ...] = ("inlets",) if inlets else () if outlets: directions += ("outlets",) for direction in directions: @@ -2198,3 +2416,27 @@ def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str: ) return "" return XComModel.deserialize_value(value) + + +@overload +def create_scheduler_operator(op: BaseOperator | SerializedBaseOperator) -> SerializedBaseOperator: ... + + +@overload +def create_scheduler_operator(op: MappedOperator | SchedulerMappedOperator) -> SchedulerMappedOperator: ... 
+ + +def create_scheduler_operator( + op: BaseOperator | MappedOperator | SerializedBaseOperator | SchedulerMappedOperator, +) -> SerializedBaseOperator | SchedulerMappedOperator: + from airflow.models.mappedoperator import MappedOperator as SchedulerMappedOperator + + if isinstance(op, (SchedulerMappedOperator, SerializedBaseOperator)): + return op + if isinstance(op, BaseOperator): + d = SerializedBaseOperator.serialize_operator(op) + elif isinstance(op, MappedOperator): + d = SerializedBaseOperator.serialize_mapped_operator(op) + else: + raise TypeError(type(op).__name__) + return SerializedBaseOperator.deserialize_operator(d) diff --git a/airflow-core/src/airflow/settings.py b/airflow-core/src/airflow/settings.py index 689ce2e4e6819..ca8de146775e2 100644 --- a/airflow-core/src/airflow/settings.py +++ b/airflow-core/src/airflow/settings.py @@ -22,7 +22,6 @@ import json import logging import os -import platform import sys import warnings from collections.abc import Callable @@ -321,6 +320,20 @@ def _is_sqlite_db_path_relative(sqla_conn_str: str) -> bool: return True +def _configure_async_session(): + global async_engine + global AsyncSession + + async_engine = create_async_engine(SQL_ALCHEMY_CONN_ASYNC, future=True) + AsyncSession = sessionmaker( + bind=async_engine, + autocommit=False, + autoflush=False, + class_=SAAsyncSession, + expire_on_commit=False, + ) + + def configure_orm(disable_connection_pool=False, pool_class=None): """Configure ORM using SQLAlchemy.""" from airflow.sdk.execution_time.secrets_masker import mask_secret @@ -335,8 +348,6 @@ def configure_orm(disable_connection_pool=False, pool_class=None): global Session global engine - global async_engine - global AsyncSession global NonScopedSession if os.environ.get("_AIRFLOW_SKIP_DB_TESTS") == "true": @@ -359,34 +370,24 @@ def configure_orm(disable_connection_pool=False, pool_class=None): connect_args["check_same_thread"] = False engine = create_engine(SQL_ALCHEMY_CONN, connect_args=connect_args, **engine_args, future=True) - async_engine = create_async_engine(SQL_ALCHEMY_CONN_ASYNC, future=True) - AsyncSession = sessionmaker( - bind=async_engine, - autocommit=False, - autoflush=False, - class_=SAAsyncSession, - expire_on_commit=False, - ) mask_secret(engine.url.password) - setup_event_handlers(engine) if conf.has_option("database", "sql_alchemy_session_maker"): _session_maker = conf.getimport("database", "sql_alchemy_session_maker") else: - - def _session_maker(_engine): - return sessionmaker( - autocommit=False, - autoflush=False, - bind=_engine, - expire_on_commit=False, - ) - + _session_maker = functools.partial( + sessionmaker, + autocommit=False, + autoflush=False, + expire_on_commit=False, + ) NonScopedSession = _session_maker(engine) Session = scoped_session(NonScopedSession) - if not platform.system() == "Windows": + _configure_async_session() + + if register_at_fork := getattr(os, "register_at_fork", None): # https://docs.sqlalchemy.org/en/20/core/pooling.html#using-connection-pools-with-multiprocessing-or-os-fork def clean_in_fork(): _globals = globals() @@ -396,7 +397,7 @@ def clean_in_fork(): async_engine.sync_engine.dispose(close=False) # Won't work on Windows - os.register_at_fork(after_in_child=clean_in_fork) + register_at_fork(after_in_child=clean_in_fork) DEFAULT_ENGINE_ARGS = { diff --git a/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py b/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py index 97531ef4257e6..2f3c8015af29c 100644 --- 
a/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py +++ b/airflow-core/src/airflow/ti_deps/deps/mapped_task_upstream_dep.py @@ -22,13 +22,13 @@ from sqlalchemy import select -from airflow.models.taskinstance import TaskInstance from airflow.ti_deps.deps.base_ti_dep import BaseTIDep from airflow.utils.state import State, TaskInstanceState if TYPE_CHECKING: from sqlalchemy.orm import Session + from airflow.models.taskinstance import TaskInstance from airflow.ti_deps.dep_context import DepContext from airflow.ti_deps.deps.base_ti_dep import TIDepStatus @@ -51,6 +51,7 @@ def _get_dep_statuses( session: Session, dep_context: DepContext, ) -> Iterator[TIDepStatus]: + from airflow.models.taskinstance import TaskInstance from airflow.sdk.definitions.mappedoperator import MappedOperator if isinstance(ti.task, MappedOperator): diff --git a/airflow-core/src/airflow/ti_deps/deps/prev_dagrun_dep.py b/airflow-core/src/airflow/ti_deps/deps/prev_dagrun_dep.py index 9ce5c1134240a..1f4363c586ee1 100644 --- a/airflow-core/src/airflow/ti_deps/deps/prev_dagrun_dep.py +++ b/airflow-core/src/airflow/ti_deps/deps/prev_dagrun_dep.py @@ -33,6 +33,7 @@ from sqlalchemy.orm import Session from airflow.sdk.types import Operator + from airflow.serialization.serialized_objects import SerializedBaseOperator _SUCCESSFUL_STATES = (TaskInstanceState.SKIPPED, TaskInstanceState.SUCCESS) @@ -104,7 +105,12 @@ def _count_unsuccessful_tis(dagrun: DagRun, task_id: str, *, session: Session) - ) @staticmethod - def _has_unsuccessful_dependants(dagrun: DagRun, task: Operator, *, session: Session) -> bool: + def _has_unsuccessful_dependants( + dagrun: DagRun, + task: Operator | SerializedBaseOperator, + *, + session: Session, + ) -> bool: """ Check if any of the task's dependants are unsuccessful in a given run. diff --git a/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py b/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py index 1475756cbcbca..9a4f30c8dff09 100644 --- a/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py +++ b/airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py @@ -139,12 +139,12 @@ def _get_expanded_ti_count() -> int: This extra closure allows us to query the database only when needed, and at most once. 
""" - from airflow.models.baseoperator import BaseOperator + from airflow.models.mappedoperator import get_mapped_ti_count if TYPE_CHECKING: assert ti.task - return BaseOperator.get_mapped_ti_count(ti.task, ti.run_id, session=session) + return get_mapped_ti_count(ti.task, ti.run_id, session=session) def _iter_expansion_dependencies(task_group: MappedTaskGroup) -> Iterator[str]: from airflow.sdk.definitions.mappedoperator import MappedOperator diff --git a/airflow-core/src/airflow/typing_compat.py b/airflow-core/src/airflow/typing_compat.py index 283537e7e07b4..8a00ac06bd7f1 100644 --- a/airflow-core/src/airflow/typing_compat.py +++ b/airflow-core/src/airflow/typing_compat.py @@ -29,17 +29,8 @@ import sys -# Literal from typing module has various issues in different Python versions, see: -# - https://typing-extensions.readthedocs.io/en/latest/#Literal -# - bpo-45679: https://github.com/python/cpython/pull/29334 -# - bpo-42345: https://github.com/python/cpython/pull/23294 -# - bpo-42345: https://github.com/python/cpython/pull/23383 -if sys.version_info >= (3, 10, 1) or (3, 9, 8) <= sys.version_info < (3, 10): - from typing import Literal -else: - from typing import Literal # type: ignore[assignment] - -from typing import ParamSpec, TypeAlias, TypeGuard +# Keeping this for backwards-compat with old providers +from typing import Literal, ParamSpec, TypeAlias, TypeGuard if sys.version_info >= (3, 11): from typing import Self diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts index d538e3e617596..91fd42bd07909 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts @@ -1,7 +1,7 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.2 import { UseQueryResult } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; export type AssetServiceGetAssetsDefaultResponse = Awaited>; export type AssetServiceGetAssetsQueryResult = UseQueryResult; @@ -140,7 +140,7 @@ export const UseDagRunServiceGetUpstreamAssetEventsKeyFn = ({ dagId, dagRunId }: export type DagRunServiceGetDagRunsDefaultResponse = Awaited>; export type DagRunServiceGetDagRunsQueryResult = UseQueryResult; export const useDagRunServiceGetDagRunsKey = "DagRunServiceGetDagRuns"; -export 
const UseDagRunServiceGetDagRunsKeyFn = ({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { +export const UseDagRunServiceGetDagRunsKeyFn = ({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { dagId: string; endDateGte?: string; endDateLte?: string; @@ -151,13 +151,32 @@ export const UseDagRunServiceGetDagRunsKeyFn = ({ dagId, endDateGte, endDateLte, orderBy?: string; runAfterGte?: string; runAfterLte?: string; + runIdPattern?: string; runType?: string[]; startDateGte?: string; startDateLte?: string; state?: string[]; updatedAtGte?: string; updatedAtLte?: string; -}, queryKey?: Array) => [useDagRunServiceGetDagRunsKey, ...(queryKey ?? [{ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }])]; +}, queryKey?: Array) => [useDagRunServiceGetDagRunsKey, ...(queryKey ?? [{ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }])]; +export type DagRunServiceWaitDagRunUntilFinishedDefaultResponse = Awaited>; +export type DagRunServiceWaitDagRunUntilFinishedQueryResult = UseQueryResult; +export const useDagRunServiceWaitDagRunUntilFinishedKey = "DagRunServiceWaitDagRunUntilFinished"; +export const UseDagRunServiceWaitDagRunUntilFinishedKeyFn = ({ dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}, queryKey?: Array) => [useDagRunServiceWaitDagRunUntilFinishedKey, ...(queryKey ?? [{ dagId, dagRunId, interval, result }])]; +export type ExperimentalServiceWaitDagRunUntilFinishedDefaultResponse = Awaited>; +export type ExperimentalServiceWaitDagRunUntilFinishedQueryResult = UseQueryResult; +export const useExperimentalServiceWaitDagRunUntilFinishedKey = "ExperimentalServiceWaitDagRunUntilFinished"; +export const UseExperimentalServiceWaitDagRunUntilFinishedKeyFn = ({ dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}, queryKey?: Array) => [useExperimentalServiceWaitDagRunUntilFinishedKey, ...(queryKey ?? 
[{ dagId, dagRunId, interval, result }])]; export type DagSourceServiceGetDagSourceDefaultResponse = Awaited>; export type DagSourceServiceGetDagSourceQueryResult = UseQueryResult; export const useDagSourceServiceGetDagSourceKey = "DagSourceServiceGetDagSource"; @@ -210,7 +229,7 @@ export const UseDagWarningServiceListDagWarningsKeyFn = ({ dagId, limit, offset, export type DagServiceGetDagsDefaultResponse = Awaited>; export type DagServiceGetDagsQueryResult = UseQueryResult; export const useDagServiceGetDagsKey = "DagServiceGetDags"; -export const UseDagServiceGetDagsKeyFn = ({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { +export const UseDagServiceGetDagsKeyFn = ({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { dagDisplayNamePattern?: string; dagIdPattern?: string; dagRunEndDateGte?: string; @@ -219,6 +238,7 @@ export const UseDagServiceGetDagsKeyFn = ({ dagDisplayNamePattern, dagIdPattern, dagRunStartDateLte?: string; dagRunState?: string[]; excludeStale?: boolean; + isFavorite?: boolean; lastDagRunState?: DagRunState; limit?: number; offset?: number; @@ -227,7 +247,7 @@ export const UseDagServiceGetDagsKeyFn = ({ dagDisplayNamePattern, dagIdPattern, paused?: boolean; tags?: string[]; tagsMatchMode?: "any" | "all"; -} = {}, queryKey?: Array) => [useDagServiceGetDagsKey, ...(queryKey ?? [{ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }])]; +} = {}, queryKey?: Array) => [useDagServiceGetDagsKey, ...(queryKey ?? [{ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }])]; export type DagServiceGetDagDefaultResponse = Awaited>; export type DagServiceGetDagQueryResult = UseQueryResult; export const useDagServiceGetDagKey = "DagServiceGetDag"; @@ -252,12 +272,13 @@ export const UseDagServiceGetDagTagsKeyFn = ({ limit, offset, orderBy, tagNamePa export type DagServiceGetDagsUiDefaultResponse = Awaited>; export type DagServiceGetDagsUiQueryResult = UseQueryResult; export const useDagServiceGetDagsUiKey = "DagServiceGetDagsUi"; -export const UseDagServiceGetDagsUiKeyFn = ({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { +export const UseDagServiceGetDagsUiKeyFn = ({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { dagDisplayNamePattern?: string; dagIdPattern?: string; dagIds?: string[]; dagRunsLimit?: number; excludeStale?: boolean; + isFavorite?: boolean; lastDagRunState?: DagRunState; limit?: number; offset?: number; @@ -266,7 +287,7 @@ export const UseDagServiceGetDagsUiKeyFn = ({ dagDisplayNamePattern, dagIdPatter paused?: boolean; tags?: string[]; tagsMatchMode?: "any" | "all"; -} = {}, queryKey?: Array) => [useDagServiceGetDagsUiKey, ...(queryKey ?? 
[{ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }])]; +} = {}, queryKey?: Array) => [useDagServiceGetDagsUiKey, ...(queryKey ?? [{ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }])]; export type EventLogServiceGetEventLogDefaultResponse = Awaited>; export type EventLogServiceGetEventLogQueryResult = UseQueryResult; export const useEventLogServiceGetEventLogKey = "EventLogServiceGetEventLog"; @@ -599,6 +620,27 @@ export const UseDagVersionServiceGetDagVersionsKeyFn = ({ bundleName, bundleVers orderBy?: string; versionNumber?: number; }, queryKey?: Array) => [useDagVersionServiceGetDagVersionsKey, ...(queryKey ?? [{ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }])]; +export type HumanInTheLoopServiceGetHitlDetailDefaultResponse = Awaited>; +export type HumanInTheLoopServiceGetHitlDetailQueryResult = UseQueryResult; +export const useHumanInTheLoopServiceGetHitlDetailKey = "HumanInTheLoopServiceGetHitlDetail"; +export const UseHumanInTheLoopServiceGetHitlDetailKeyFn = ({ dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}, queryKey?: Array) => [useHumanInTheLoopServiceGetHitlDetailKey, ...(queryKey ?? [{ dagId, dagRunId, taskId }])]; +export type HumanInTheLoopServiceGetMappedTiHitlDetailDefaultResponse = Awaited>; +export type HumanInTheLoopServiceGetMappedTiHitlDetailQueryResult = UseQueryResult; +export const useHumanInTheLoopServiceGetMappedTiHitlDetailKey = "HumanInTheLoopServiceGetMappedTiHitlDetail"; +export const UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}, queryKey?: Array) => [useHumanInTheLoopServiceGetMappedTiHitlDetailKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; +export type HumanInTheLoopServiceGetHitlDetailsDefaultResponse = Awaited>; +export type HumanInTheLoopServiceGetHitlDetailsQueryResult = UseQueryResult; +export const useHumanInTheLoopServiceGetHitlDetailsKey = "HumanInTheLoopServiceGetHitlDetails"; +export const UseHumanInTheLoopServiceGetHitlDetailsKeyFn = (queryKey?: Array) => [useHumanInTheLoopServiceGetHitlDetailsKey, ...(queryKey ?? [])]; export type MonitorServiceGetHealthDefaultResponse = Awaited>; export type MonitorServiceGetHealthQueryResult = UseQueryResult; export const useMonitorServiceGetHealthKey = "MonitorServiceGetHealth"; @@ -651,24 +693,6 @@ export const UseStructureServiceStructureDataKeyFn = ({ dagId, externalDependenc root?: string; versionNumber?: number; }, queryKey?: Array) => [useStructureServiceStructureDataKey, ...(queryKey ?? 
[{ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }])]; -export type GridServiceGridDataDefaultResponse = Awaited>; -export type GridServiceGridDataQueryResult = UseQueryResult; -export const useGridServiceGridDataKey = "GridServiceGridData"; -export const UseGridServiceGridDataKeyFn = ({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; -}, queryKey?: Array) => [useGridServiceGridDataKey, ...(queryKey ?? [{ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }])]; export type GridServiceGetDagStructureDefaultResponse = Awaited>; export type GridServiceGetDagStructureQueryResult = UseQueryResult; export const useGridServiceGetDagStructureKey = "GridServiceGetDagStructure"; @@ -704,6 +728,15 @@ export const useGridServiceGetLatestRunKey = "GridServiceGetLatestRun"; export const UseGridServiceGetLatestRunKeyFn = ({ dagId }: { dagId: string; }, queryKey?: Array) => [useGridServiceGetLatestRunKey, ...(queryKey ?? [{ dagId }])]; +export type CalendarServiceGetCalendarDefaultResponse = Awaited>; +export type CalendarServiceGetCalendarQueryResult = UseQueryResult; +export const useCalendarServiceGetCalendarKey = "CalendarServiceGetCalendar"; +export const UseCalendarServiceGetCalendarKeyFn = ({ dagId, granularity, logicalDateGte, logicalDateLte }: { + dagId: string; + granularity?: "hourly" | "daily"; + logicalDateGte?: string; + logicalDateLte?: string; +}, queryKey?: Array) => [useCalendarServiceGetCalendarKey, ...(queryKey ?? 
[{ dagId, granularity, logicalDateGte, logicalDateLte }])]; export type AssetServiceCreateAssetEventMutationResult = Awaited>; export type AssetServiceMaterializeAssetMutationResult = Awaited>; export type BackfillServiceCreateBackfillMutationResult = Awaited>; @@ -714,6 +747,8 @@ export type ConnectionServiceCreateDefaultConnectionsMutationResult = Awaited>; export type DagRunServiceTriggerDagRunMutationResult = Awaited>; export type DagRunServiceGetListDagRunsBatchMutationResult = Awaited>; +export type DagServiceFavoriteDagMutationResult = Awaited>; +export type DagServiceUnfavoriteDagMutationResult = Awaited>; export type TaskInstanceServiceGetTaskInstancesBatchMutationResult = Awaited>; export type TaskInstanceServicePostClearTaskInstancesMutationResult = Awaited>; export type PoolServicePostPoolMutationResult = Awaited>; @@ -738,6 +773,8 @@ export type PoolServiceBulkPoolsMutationResult = Awaited>; export type VariableServicePatchVariableMutationResult = Awaited>; export type VariableServiceBulkVariablesMutationResult = Awaited>; +export type HumanInTheLoopServiceUpdateHitlDetailMutationResult = Awaited>; +export type HumanInTheLoopServiceUpdateMappedTiHitlDetailMutationResult = Awaited>; export type AssetServiceDeleteAssetQueuedEventsMutationResult = Awaited>; export type AssetServiceDeleteDagAssetQueuedEventsMutationResult = Awaited>; export type AssetServiceDeleteDagAssetQueuedEventMutationResult = Awaited>; diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts index 5ba5f5f3620c7..d10b539687bf5 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts @@ -1,7 +1,7 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.2 import { type QueryClient } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -272,10 +272,11 @@ export const ensureUseDagRunServiceGetUpstreamAssetEventsData = (queryClient: Qu * @param data.runType * @param data.state * @param data.orderBy +* @param data.runIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
* @returns DAGRunCollectionResponse Successful Response * @throws ApiError */ -export const ensureUseDagRunServiceGetDagRunsData = (queryClient: QueryClient, { dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { +export const ensureUseDagRunServiceGetDagRunsData = (queryClient: QueryClient, { dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { dagId: string; endDateGte?: string; endDateLte?: string; @@ -286,13 +287,48 @@ export const ensureUseDagRunServiceGetDagRunsData = (queryClient: QueryClient, { orderBy?: string; runAfterGte?: string; runAfterLte?: string; + runIdPattern?: string; runType?: string[]; startDateGte?: string; startDateLte?: string; state?: string[]; updatedAtGte?: string; updatedAtLte?: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) }); +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) }); +/** +* Experimental: Wait for a dag run to complete, and return task results if requested. +* 🚧 This is an experimental endpoint and may change or be removed without notice. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.interval Seconds to wait between dag run state checks +* @param data.result Collect result XCom from task. Can be set multiple times. +* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseDagRunServiceWaitDagRunUntilFinishedData = (queryClient: QueryClient, { dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}) => queryClient.ensureQueryData({ queryKey: Common.UseDagRunServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }), queryFn: () => DagRunService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) }); +/** +* Experimental: Wait for a dag run to complete, and return task results if requested. +* 🚧 This is an experimental endpoint and may change or be removed without notice. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.interval Seconds to wait between dag run state checks +* @param data.result Collect result XCom from task. Can be set multiple times. 
+* @returns unknown Successful Response +* @throws ApiError +*/ +export const ensureUseExperimentalServiceWaitDagRunUntilFinishedData = (queryClient: QueryClient, { dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}) => queryClient.ensureQueryData({ queryKey: Common.UseExperimentalServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }), queryFn: () => ExperimentalService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) }); /** * Get Dag Source * Get source code using file token. @@ -402,10 +438,11 @@ export const ensureUseDagWarningServiceListDagWarningsData = (queryClient: Query * @param data.dagRunEndDateLte * @param data.dagRunState * @param data.orderBy +* @param data.isFavorite * @returns DAGCollectionResponse Successful Response * @throws ApiError */ -export const ensureUseDagServiceGetDagsData = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { +export const ensureUseDagServiceGetDagsData = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { dagDisplayNamePattern?: string; dagIdPattern?: string; dagRunEndDateGte?: string; @@ -414,6 +451,7 @@ export const ensureUseDagServiceGetDagsData = (queryClient: QueryClient, { dagDi dagRunStartDateLte?: string; dagRunState?: string[]; excludeStale?: boolean; + isFavorite?: boolean; lastDagRunState?: DagRunState; limit?: number; offset?: number; @@ -422,7 +460,7 @@ export const ensureUseDagServiceGetDagsData = (queryClient: QueryClient, { dagDi paused?: boolean; tags?: string[]; tagsMatchMode?: "any" | "all"; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); /** * Get Dag * Get basic information about a DAG. 
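A consumer-side sketch of two of the additions above, the runIdPattern filter and the experimental wait helper; the client setup, ids, and import path are illustrative:

    import { QueryClient } from "@tanstack/react-query";
    import {
      ensureUseDagRunServiceGetDagRunsData,
      ensureUseDagRunServiceWaitDagRunUntilFinishedData,
    } from "./ensureQueryData";

    const queryClient = new QueryClient();

    // runIdPattern is a SQL LIKE expression, not a regex.
    await ensureUseDagRunServiceGetDagRunsData(queryClient, {
      dagId: "example_dag",
      runIdPattern: "manual__%",
    });

    // Experimental: poll the run state every 5 seconds until it finishes.
    await ensureUseDagRunServiceWaitDagRunUntilFinishedData(queryClient, {
      dagId: "example_dag",
      dagRunId: "manual__2025-07-01T00:00:00",
      interval: 5,
    });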
@@ -479,15 +517,17 @@ export const ensureUseDagServiceGetDagTagsData = (queryClient: QueryClient, { li * @param data.paused * @param data.lastDagRunState * @param data.orderBy +* @param data.isFavorite * @returns DAGWithLatestDagRunsCollectionResponse Successful Response * @throws ApiError */ -export const ensureUseDagServiceGetDagsUiData = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { +export const ensureUseDagServiceGetDagsUiData = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { dagDisplayNamePattern?: string; dagIdPattern?: string; dagIds?: string[]; dagRunsLimit?: number; excludeStale?: boolean; + isFavorite?: boolean; lastDagRunState?: DagRunState; limit?: number; offset?: number; @@ -496,7 +536,7 @@ export const ensureUseDagServiceGetDagsUiData = (queryClient: QueryClient, { dag paused?: boolean; tags?: string[]; tagsMatchMode?: "any" | "all"; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDagsUi({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); +} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDagsUi({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); /** * Get Event Log * @param data The data for the request. @@ -1132,6 +1172,45 @@ export const ensureUseDagVersionServiceGetDagVersionsData = (queryClient: QueryC versionNumber?: number; }) => queryClient.ensureQueryData({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) }); /** +* Get Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const ensureUseHumanInTheLoopServiceGetHitlDetailData = (queryClient: QueryClient, { dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) }); +/** +* Get Mapped Ti Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const ensureUseHumanInTheLoopServiceGetMappedTiHitlDetailData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Hitl Details +* Get Human-in-the-loop details. +* @returns HITLDetailCollection Successful Response +* @throws ApiError +*/ +export const ensureUseHumanInTheLoopServiceGetHitlDetailsData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn(), queryFn: () => HumanInTheLoopService.getHitlDetails() }); +/** * Get Health * @returns HealthInfoResponse Successful Response * @throws ApiError @@ -1225,41 +1304,6 @@ export const ensureUseStructureServiceStructureDataData = (queryClient: QueryCli versionNumber?: number; }) => queryClient.ensureQueryData({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) }); /** -* Grid Data -* Return grid data. -* @param data The data for the request. -* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.offset -* @param data.runType -* @param data.state -* @param data.limit -* @param data.orderBy -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @returns GridResponse Successful Response -* @throws ApiError -*/ -export const ensureUseGridServiceGridDataData = (queryClient: QueryClient, { dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; -}) => queryClient.ensureQueryData({ queryKey: Common.UseGridServiceGridDataKeyFn({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }), queryFn: () => GridService.gridData({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }) }); -/** * Get Dag Structure * Return dag structure for grid view. * @param data The data for the request. @@ -1338,3 +1382,20 @@ export const ensureUseGridServiceGetGridTiSummariesData = (queryClient: QueryCli export const ensureUseGridServiceGetLatestRunData = (queryClient: QueryClient, { dagId }: { dagId: string; }) => queryClient.ensureQueryData({ queryKey: Common.UseGridServiceGetLatestRunKeyFn({ dagId }), queryFn: () => GridService.getLatestRun({ dagId }) }); +/** +* Get Calendar +* Get calendar data for a DAG including historical and planned DAG runs. 
+* @param data The data for the request. +* @param data.dagId +* @param data.granularity +* @param data.logicalDateGte +* @param data.logicalDateLte +* @returns CalendarTimeRangeCollectionResponse Successful Response +* @throws ApiError +*/ +export const ensureUseCalendarServiceGetCalendarData = (queryClient: QueryClient, { dagId, granularity, logicalDateGte, logicalDateLte }: { + dagId: string; + granularity?: "hourly" | "daily"; + logicalDateGte?: string; + logicalDateLte?: string; +}) => queryClient.ensureQueryData({ queryKey: Common.UseCalendarServiceGetCalendarKeyFn({ dagId, granularity, logicalDateGte, logicalDateLte }), queryFn: () => CalendarService.getCalendar({ dagId, granularity, logicalDateGte, logicalDateLte }) }); diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts index f878624e22eb4..2eab4c35b3ef4 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts @@ -1,7 +1,7 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.2 import { type QueryClient } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -272,10 +272,11 @@ export const prefetchUseDagRunServiceGetUpstreamAssetEvents = (queryClient: Quer * @param data.runType * @param data.state * @param data.orderBy +* @param data.runIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. 
* @returns DAGRunCollectionResponse Successful Response * @throws ApiError */ -export const prefetchUseDagRunServiceGetDagRuns = (queryClient: QueryClient, { dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { +export const prefetchUseDagRunServiceGetDagRuns = (queryClient: QueryClient, { dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { dagId: string; endDateGte?: string; endDateLte?: string; @@ -286,13 +287,48 @@ export const prefetchUseDagRunServiceGetDagRuns = (queryClient: QueryClient, { d orderBy?: string; runAfterGte?: string; runAfterLte?: string; + runIdPattern?: string; runType?: string[]; startDateGte?: string; startDateLte?: string; state?: string[]; updatedAtGte?: string; updatedAtLte?: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) }); +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) }); +/** +* Experimental: Wait for a dag run to complete, and return task results if requested. +* 🚧 This is an experimental endpoint and may change or be removed without notice. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.interval Seconds to wait between dag run state checks +* @param data.result Collect result XCom from task. Can be set multiple times. +* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseDagRunServiceWaitDagRunUntilFinished = (queryClient: QueryClient, { dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}) => queryClient.prefetchQuery({ queryKey: Common.UseDagRunServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }), queryFn: () => DagRunService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) }); +/** +* Experimental: Wait for a dag run to complete, and return task results if requested. +* 🚧 This is an experimental endpoint and may change or be removed without notice. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.interval Seconds to wait between dag run state checks +* @param data.result Collect result XCom from task. Can be set multiple times. 
+* @returns unknown Successful Response +* @throws ApiError +*/ +export const prefetchUseExperimentalServiceWaitDagRunUntilFinished = (queryClient: QueryClient, { dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}) => queryClient.prefetchQuery({ queryKey: Common.UseExperimentalServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }), queryFn: () => ExperimentalService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) }); /** * Get Dag Source * Get source code using file token. @@ -402,10 +438,11 @@ export const prefetchUseDagWarningServiceListDagWarnings = (queryClient: QueryCl * @param data.dagRunEndDateLte * @param data.dagRunState * @param data.orderBy +* @param data.isFavorite * @returns DAGCollectionResponse Successful Response * @throws ApiError */ -export const prefetchUseDagServiceGetDags = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { +export const prefetchUseDagServiceGetDags = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { dagDisplayNamePattern?: string; dagIdPattern?: string; dagRunEndDateGte?: string; @@ -414,6 +451,7 @@ export const prefetchUseDagServiceGetDags = (queryClient: QueryClient, { dagDisp dagRunStartDateLte?: string; dagRunState?: string[]; excludeStale?: boolean; + isFavorite?: boolean; lastDagRunState?: DagRunState; limit?: number; offset?: number; @@ -422,7 +460,7 @@ export const prefetchUseDagServiceGetDags = (queryClient: QueryClient, { dagDisp paused?: boolean; tags?: string[]; tagsMatchMode?: "any" | "all"; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); /** * Get Dag * Get basic information about a DAG. 
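A minimal consumer sketch of the new `runIdPattern` filter exposed by the prefetch helpers above, assuming the UI's usual path alias for the generated client; the QueryClient wiring, the pattern string, and the page size are illustrative assumptions, while the function name and parameters come from this diff:

```tsx
// Hedged sketch: warm the cache for manually triggered runs before a route
// renders. `%` matches any sequence and `_` a single character; regular
// expressions are not supported by runIdPattern.
import { QueryClient } from "@tanstack/react-query";

import { prefetchUseDagRunServiceGetDagRuns } from "openapi/queries/prefetch"; // path alias is an assumption

const queryClient = new QueryClient();

export const prefetchManualRuns = (dagId: string) =>
  prefetchUseDagRunServiceGetDagRuns(queryClient, {
    dagId,
    runIdPattern: "manual__%", // hypothetical LIKE pattern
    orderBy: "-run_after",
    limit: 25,
  });
```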
@@ -479,15 +517,17 @@ export const prefetchUseDagServiceGetDagTags = (queryClient: QueryClient, { limi * @param data.paused * @param data.lastDagRunState * @param data.orderBy +* @param data.isFavorite * @returns DAGWithLatestDagRunsCollectionResponse Successful Response * @throws ApiError */ -export const prefetchUseDagServiceGetDagsUi = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { +export const prefetchUseDagServiceGetDagsUi = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { dagDisplayNamePattern?: string; dagIdPattern?: string; dagIds?: string[]; dagRunsLimit?: number; excludeStale?: boolean; + isFavorite?: boolean; lastDagRunState?: DagRunState; limit?: number; offset?: number; @@ -496,7 +536,7 @@ export const prefetchUseDagServiceGetDagsUi = (queryClient: QueryClient, { dagDi paused?: boolean; tags?: string[]; tagsMatchMode?: "any" | "all"; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDagsUi({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); +} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDagsUi({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); /** * Get Event Log * @param data The data for the request. @@ -1132,6 +1172,45 @@ export const prefetchUseDagVersionServiceGetDagVersions = (queryClient: QueryCli versionNumber?: number; }) => queryClient.prefetchQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) }); /** +* Get Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const prefetchUseHumanInTheLoopServiceGetHitlDetail = (queryClient: QueryClient, { dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) }); +/** +* Get Mapped Ti Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const prefetchUseHumanInTheLoopServiceGetMappedTiHitlDetail = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) }); +/** +* Get Hitl Details +* Get Human-in-the-loop details. +* @returns HITLDetailCollection Successful Response +* @throws ApiError +*/ +export const prefetchUseHumanInTheLoopServiceGetHitlDetails = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn(), queryFn: () => HumanInTheLoopService.getHitlDetails() }); +/** * Get Health * @returns HealthInfoResponse Successful Response * @throws ApiError @@ -1225,41 +1304,6 @@ export const prefetchUseStructureServiceStructureData = (queryClient: QueryClien versionNumber?: number; }) => queryClient.prefetchQuery({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) }); /** -* Grid Data -* Return grid data. -* @param data The data for the request. -* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.offset -* @param data.runType -* @param data.state -* @param data.limit -* @param data.orderBy -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @returns GridResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseGridServiceGridData = (queryClient: QueryClient, { dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; -}) => queryClient.prefetchQuery({ queryKey: Common.UseGridServiceGridDataKeyFn({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }), queryFn: () => GridService.gridData({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }) }); -/** * Get Dag Structure * Return dag structure for grid view. * @param data The data for the request. @@ -1338,3 +1382,20 @@ export const prefetchUseGridServiceGetGridTiSummaries = (queryClient: QueryClien export const prefetchUseGridServiceGetLatestRun = (queryClient: QueryClient, { dagId }: { dagId: string; }) => queryClient.prefetchQuery({ queryKey: Common.UseGridServiceGetLatestRunKeyFn({ dagId }), queryFn: () => GridService.getLatestRun({ dagId }) }); +/** +* Get Calendar +* Get calendar data for a DAG including historical and planned DAG runs. +* @param data The data for the request. 
+* @param data.dagId +* @param data.granularity +* @param data.logicalDateGte +* @param data.logicalDateLte +* @returns CalendarTimeRangeCollectionResponse Successful Response +* @throws ApiError +*/ +export const prefetchUseCalendarServiceGetCalendar = (queryClient: QueryClient, { dagId, granularity, logicalDateGte, logicalDateLte }: { + dagId: string; + granularity?: "hourly" | "daily"; + logicalDateGte?: string; + logicalDateLte?: string; +}) => queryClient.prefetchQuery({ queryKey: Common.UseCalendarServiceGetCalendarKeyFn({ dagId, granularity, logicalDateGte, logicalDateLte }), queryFn: () => CalendarService.getCalendar({ dagId, granularity, logicalDateGte, logicalDateLte }) }); diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts index 3e3ba37467f1c..f47a175614451 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts @@ -1,8 +1,8 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.2 import { UseMutationOptions, UseQueryOptions, useMutation, useQuery } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; -import { BackfillPostBody, BulkBody_BulkTaskInstanceBody_, BulkBody_ConnectionBody_, BulkBody_PoolBody_, BulkBody_VariableBody_, ClearTaskInstancesBody, ConnectionBody, CreateAssetEventsBody, DAGPatchBody, DAGRunClearBody, DAGRunPatchBody, DAGRunsBatchBody, DagRunState, DagWarningType, PatchTaskInstanceBody, PoolBody, PoolPatchBody, TaskInstancesBatchBody, TriggerDAGRunPostBody, VariableBody, XComCreateBody, XComUpdateBody } from "../requests/types.gen"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { BackfillPostBody, BulkBody_BulkTaskInstanceBody_, BulkBody_ConnectionBody_, BulkBody_PoolBody_, BulkBody_VariableBody_, ClearTaskInstancesBody, ConnectionBody, CreateAssetEventsBody, DAGPatchBody, DAGRunClearBody, DAGRunPatchBody, DAGRunsBatchBody, DagRunState, DagWarningType, PatchTaskInstanceBody, PoolBody, PoolPatchBody, TaskInstancesBatchBody, TriggerDAGRunPostBody, UpdateHITLDetailPayload, VariableBody, XComCreateBody, XComUpdateBody } from "../requests/types.gen"; import * as Common from "./common"; /** * Get Assets @@ -272,10 +272,11 @@ export const useDagRunServiceGetUpstreamAssetEvents = = unknown[]>({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, 
runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: {
+export const useDagRunServiceGetDagRuns = <TData = Common.UseDagRunServiceGetDagRunsDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: {
   dagId: string;
   endDateGte?: string;
   endDateLte?: string;
@@ -286,13 +287,48 @@ export const useDagRunServiceGetDagRuns = <TData = Common.UseDagRunServiceGetDa
-}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }, queryKey), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) as TData, ...options });
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }, queryKey), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) as TData, ...options });
+/**
+* Experimental: Wait for a dag run to complete, and return task results if requested.
+* 🚧 This is an experimental endpoint and may change or be removed without notice.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.interval Seconds to wait between dag run state checks
+* @param data.result Collect result XCom from task. Can be set multiple times.
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceWaitDagRunUntilFinished = <TData = Common.UseDagRunServiceWaitDagRunUntilFinishedDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, interval, result }: {
+  dagId: string;
+  dagRunId: string;
+  interval: number;
+  result?: string[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useQuery<TData, TError>({ queryKey: Common.UseDagRunServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }, queryKey), queryFn: () => DagRunService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) as TData, ...options });
+/**
+* Experimental: Wait for a dag run to complete, and return task results if requested.
+* 🚧 This is an experimental endpoint and may change or be removed without notice.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.interval Seconds to wait between dag run state checks
+* @param data.result Collect result XCom from task. Can be set multiple times.
+* @returns unknown Successful Response +* @throws ApiError +*/ +export const useExperimentalServiceWaitDagRunUntilFinished = = unknown[]>({ dagId, dagRunId, interval, result }: { + dagId: string; + dagRunId: string; + interval: number; + result?: string[]; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseExperimentalServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }, queryKey), queryFn: () => ExperimentalService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) as TData, ...options }); /** * Get Dag Source * Get source code using file token. @@ -402,10 +438,11 @@ export const useDagWarningServiceListDagWarnings = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { +export const useDagServiceGetDags = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { dagDisplayNamePattern?: string; dagIdPattern?: string; dagRunEndDateGte?: string; @@ -414,6 +451,7 @@ export const useDagServiceGetDags = , "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options }); +} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options }); /** * Get Dag * Get basic information about a DAG. 
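A minimal usage sketch of the experimental wait hook added above. The `interval` and `result` semantics come from the generated docstring; the import path and the XCom key are assumptions, and the response is typed `unknown` in the spec, so it is stringified rather than given a shape:

```tsx
// Hedged sketch: block rendering on an in-flight dag run and show whatever
// the experimental endpoint returns once the run finishes.
import { useDagRunServiceWaitDagRunUntilFinished } from "openapi/queries"; // path alias is an assumption

export const DagRunWaiter = ({ dagId, dagRunId }: { dagId: string; dagRunId: string }) => {
  const { data, isLoading } = useDagRunServiceWaitDagRunUntilFinished({
    dagId,
    dagRunId,
    interval: 5, // seconds between dag run state checks, per the docstring
    result: ["return_value"], // hypothetical XCom key to collect
  });

  if (isLoading) {
    return <span>Waiting for the run to finish…</span>;
  }

  // `data` is typed `unknown`, so stringify rather than assume a shape.
  return <pre>{JSON.stringify(data, undefined, 2)}</pre>;
};
```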
@@ -479,15 +517,17 @@ export const useDagServiceGetDagTags = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { +export const useDagServiceGetDagsUi = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { dagDisplayNamePattern?: string; dagIdPattern?: string; dagIds?: string[]; dagRunsLimit?: number; excludeStale?: boolean; + isFavorite?: boolean; lastDagRunState?: DagRunState; limit?: number; offset?: number; @@ -496,7 +536,7 @@ export const useDagServiceGetDagsUi = , "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDagsUi({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options }); +} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDagsUi({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options }); /** * Get Event Log * @param data The data for the request. @@ -1132,6 +1172,45 @@ export const useDagVersionServiceGetDagVersions = , "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) as TData, ...options }); /** +* Get Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceGetHitlDetail = = unknown[]>({ dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) as TData, ...options }); +/** +* Get Mapped Ti Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceGetMappedTiHitlDetail = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); +/** +* Get Hitl Details +* Get Human-in-the-loop details. +* @returns HITLDetailCollection Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceGetHitlDetails = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn(queryKey), queryFn: () => HumanInTheLoopService.getHitlDetails() as TData, ...options }); +/** * Get Health * @returns HealthInfoResponse Successful Response * @throws ApiError @@ -1225,41 +1304,6 @@ export const useStructureServiceStructureData = , "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }, queryKey), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) as TData, ...options }); /** -* Grid Data -* Return grid data. -* @param data The data for the request. -* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.offset -* @param data.runType -* @param data.state -* @param data.limit -* @param data.orderBy -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @returns GridResponse Successful Response -* @throws ApiError -*/ -export const useGridServiceGridData = = unknown[]>({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseGridServiceGridDataKeyFn({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }, queryKey), queryFn: () => GridService.gridData({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }) as TData, ...options }); -/** * Get Dag Structure * Return dag structure for grid view. * @param data The data for the request. @@ -1339,6 +1383,23 @@ export const useGridServiceGetLatestRun = , "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseGridServiceGetLatestRunKeyFn({ dagId }, queryKey), queryFn: () => GridService.getLatestRun({ dagId }) as TData, ...options }); /** +* Get Calendar +* Get calendar data for a DAG including historical and planned DAG runs. 
+* @param data The data for the request. +* @param data.dagId +* @param data.granularity +* @param data.logicalDateGte +* @param data.logicalDateLte +* @returns CalendarTimeRangeCollectionResponse Successful Response +* @throws ApiError +*/ +export const useCalendarServiceGetCalendar = = unknown[]>({ dagId, granularity, logicalDateGte, logicalDateLte }: { + dagId: string; + granularity?: "hourly" | "daily"; + logicalDateGte?: string; + logicalDateLte?: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseCalendarServiceGetCalendarKeyFn({ dagId, granularity, logicalDateGte, logicalDateLte }, queryKey), queryFn: () => CalendarService.getCalendar({ dagId, granularity, logicalDateGte, logicalDateLte }) as TData, ...options }); +/** * Create Asset Event * Create asset events. * @param data The data for the request. @@ -1476,6 +1537,32 @@ export const useDagRunServiceGetListDagRunsBatch = ({ mutationFn: ({ dagId, requestBody }) => DagRunService.getListDagRunsBatch({ dagId, requestBody }) as unknown as Promise, ...options }); /** +* Favorite Dag +* Mark the DAG as favorite. +* @param data The data for the request. +* @param data.dagId +* @returns void Successful Response +* @throws ApiError +*/ +export const useDagServiceFavoriteDag = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId }) => DagService.favoriteDag({ dagId }) as unknown as Promise, ...options }); +/** +* Unfavorite Dag +* Unmark the DAG as favorite. +* @param data The data for the request. +* @param data.dagId +* @returns void Successful Response +* @throws ApiError +*/ +export const useDagServiceUnfavoriteDag = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId }) => DagService.unfavoriteDag({ dagId }) as unknown as Promise, ...options }); +/** * Get Task Instances Batch * Get list of task instances. * @param data The data for the request. @@ -1941,6 +2028,53 @@ export const useVariableServiceBulkVariables = ({ mutationFn: ({ requestBody }) => VariableService.bulkVariables({ requestBody }) as unknown as Promise, ...options }); /** +* Update Hitl Detail +* Update a Human-in-the-loop detail. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.requestBody +* @returns HITLDetailResponse Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceUpdateHitlDetail = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, requestBody, taskId }) => HumanInTheLoopService.updateHitlDetail({ dagId, dagRunId, requestBody, taskId }) as unknown as Promise, ...options }); +/** +* Update Mapped Ti Hitl Detail +* Update a Human-in-the-loop detail. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @param data.requestBody +* @returns HITLDetailResponse Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceUpdateMappedTiHitlDetail = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId }) => HumanInTheLoopService.updateMappedTiHitlDetail({ dagId, dagRunId, mapIndex, requestBody, taskId }) as unknown as Promise, ...options }); +/** * Delete Asset Queued Events * Delete queued asset events for an asset. * @param data The data for the request. 
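Before the suspense variants below, a hedged sketch of responding to a Human-in-the-loop prompt with the new mutation hooks. The payload fields mirror `UpdateHITLDetailPayload` from this diff; the import path, the chosen option value, and the blanket cache invalidation are assumptions:

```tsx
// Hedged sketch: record a HITL response, then refetch so the UI reflects it.
import { useQueryClient } from "@tanstack/react-query";

import { useHumanInTheLoopServiceUpdateHitlDetail } from "openapi/queries"; // path alias is an assumption

type Props = { dagId: string; dagRunId: string; taskId: string };

export const ApproveButton = ({ dagId, dagRunId, taskId }: Props) => {
  const queryClient = useQueryClient();
  const updateHitl = useHumanInTheLoopServiceUpdateHitlDetail({
    // Invalidate everything for simplicity; a scoped query key would be tighter.
    onSuccess: () => queryClient.invalidateQueries(),
  });

  return (
    <button
      disabled={updateHitl.isPending}
      onClick={() =>
        updateHitl.mutate({
          dagId,
          dagRunId,
          taskId,
          requestBody: { chosen_options: ["approve"], params_input: {} }, // hypothetical option
        })
      }
    >
      Approve
    </button>
  );
};
```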
diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts index 2ebe843edb37e..2f9e37e78d6c2 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts @@ -1,7 +1,7 @@ // generated with @7nohe/openapi-react-query-codegen@1.6.2 import { UseQueryOptions, useSuspenseQuery } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; +import { AssetService, AuthLinksService, BackfillService, CalendarService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExperimentalService, ExtraLinksService, GridService, HumanInTheLoopService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -272,10 +272,11 @@ export const useDagRunServiceGetUpstreamAssetEventsSuspense = = unknown[]>({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { +export const useDagRunServiceGetDagRunsSuspense = = unknown[]>({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { dagId: string; endDateGte?: string; endDateLte?: string; @@ -286,13 +287,48 @@ export const useDagRunServiceGetDagRunsSuspense = , "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }, queryKey), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) as TData, ...options }); +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }, queryKey), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runIdPattern, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) as TData, 
...options });
+/**
+* Experimental: Wait for a dag run to complete, and return task results if requested.
+* 🚧 This is an experimental endpoint and may change or be removed without notice.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.interval Seconds to wait between dag run state checks
+* @param data.result Collect result XCom from task. Can be set multiple times.
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useDagRunServiceWaitDagRunUntilFinishedSuspense = <TData = Common.UseDagRunServiceWaitDagRunUntilFinishedDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, interval, result }: {
+  dagId: string;
+  dagRunId: string;
+  interval: number;
+  result?: string[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseDagRunServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }, queryKey), queryFn: () => DagRunService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) as TData, ...options });
+/**
+* Experimental: Wait for a dag run to complete, and return task results if requested.
+* 🚧 This is an experimental endpoint and may change or be removed without notice.
+* @param data The data for the request.
+* @param data.dagId
+* @param data.dagRunId
+* @param data.interval Seconds to wait between dag run state checks
+* @param data.result Collect result XCom from task. Can be set multiple times.
+* @returns unknown Successful Response
+* @throws ApiError
+*/
+export const useExperimentalServiceWaitDagRunUntilFinishedSuspense = <TData = Common.UseExperimentalServiceWaitDagRunUntilFinishedDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[]>({ dagId, dagRunId, interval, result }: {
+  dagId: string;
+  dagRunId: string;
+  interval: number;
+  result?: string[];
+}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">) => useSuspenseQuery<TData, TError>({ queryKey: Common.UseExperimentalServiceWaitDagRunUntilFinishedKeyFn({ dagId, dagRunId, interval, result }, queryKey), queryFn: () => ExperimentalService.waitDagRunUntilFinished({ dagId, dagRunId, interval, result }) as TData, ...options });
 /**
 * Get Dag Source
 * Get source code using file token.
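A short sketch of how the `*Suspense` variants above are consumed: they integrate with React Suspense, so the component can treat `data` as always present. The import path is an assumption, and the response field names (`dag_runs`, `dag_run_id`) come from the public API schema rather than this hunk:

```tsx
// Hedged sketch: no isLoading branch, because the hook suspends until data
// resolves; the fallback lives in the surrounding <Suspense> boundary.
import { Suspense } from "react";

import { useDagRunServiceGetDagRunsSuspense } from "openapi/queries/suspense"; // path alias is an assumption

const RunList = ({ dagId }: { dagId: string }) => {
  const { data } = useDagRunServiceGetDagRunsSuspense({ dagId, runIdPattern: "scheduled__%" });

  return (
    <ul>
      {data.dag_runs.map((run) => (
        <li key={run.dag_run_id}>{run.dag_run_id}</li>
      ))}
    </ul>
  );
};

export const RunsPanel = ({ dagId }: { dagId: string }) => (
  <Suspense fallback={<span>Loading runs…</span>}>
    <RunList dagId={dagId} />
  </Suspense>
);
```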
@@ -402,10 +438,11 @@ export const useDagWarningServiceListDagWarningsSuspense = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { +export const useDagServiceGetDagsSuspense = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { dagDisplayNamePattern?: string; dagIdPattern?: string; dagRunEndDateGte?: string; @@ -414,6 +451,7 @@ export const useDagServiceGetDagsSuspense = , "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options }); +} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options }); /** * Get Dag * Get basic information about a DAG. 
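A small sketch tying the new `isFavorite` filter to the `favoriteDag` / `unfavoriteDag` mutations added earlier in queries.ts. The import path, the `dags` field name (from the public DAG collection schema), and the blanket invalidation are assumptions:

```tsx
// Hedged sketch: one hook that lists favorites and toggles favorite state.
import { useQueryClient } from "@tanstack/react-query";

import { useDagServiceFavoriteDag, useDagServiceGetDags, useDagServiceUnfavoriteDag } from "openapi/queries"; // path alias is an assumption

export const useFavoriteDags = () => {
  const queryClient = useQueryClient();
  // Invalidate broadly after a toggle; a scoped query key would be tighter.
  const refresh = () => queryClient.invalidateQueries();

  const favorites = useDagServiceGetDags({ isFavorite: true });
  const favorite = useDagServiceFavoriteDag({ onSuccess: refresh });
  const unfavorite = useDagServiceUnfavoriteDag({ onSuccess: refresh });

  return {
    dags: favorites.data?.dags ?? [],
    toggle: (dagId: string, isCurrentlyFavorite: boolean) =>
      (isCurrentlyFavorite ? unfavorite : favorite).mutate({ dagId }),
  };
};
```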
@@ -479,15 +517,17 @@ export const useDagServiceGetDagTagsSuspense = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { +export const useDagServiceGetDagsUiSuspense = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { dagDisplayNamePattern?: string; dagIdPattern?: string; dagIds?: string[]; dagRunsLimit?: number; excludeStale?: boolean; + isFavorite?: boolean; lastDagRunState?: DagRunState; limit?: number; offset?: number; @@ -496,7 +536,7 @@ export const useDagServiceGetDagsUiSuspense = , "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDagsUi({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options }); +} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagServiceGetDagsUiKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDagsUi({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, isFavorite, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options }); /** * Get Event Log * @param data The data for the request. @@ -1132,6 +1172,45 @@ export const useDagVersionServiceGetDagVersionsSuspense = , "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) as TData, ...options }); /** +* Get Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. +* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceGetHitlDetailSuspense = = unknown[]>({ dagId, dagRunId, taskId }: { + dagId: string; + dagRunId: string; + taskId: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailKeyFn({ dagId, dagRunId, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getHitlDetail({ dagId, dagRunId, taskId }) as TData, ...options }); +/** +* Get Mapped Ti Hitl Detail +* Get a Human-in-the-loop detail of a specific task instance. +* @param data The data for the request. 
+* @param data.dagId +* @param data.dagRunId +* @param data.taskId +* @param data.mapIndex +* @returns HITLDetail Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceGetMappedTiHitlDetailSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetMappedTiHitlDetailKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => HumanInTheLoopService.getMappedTiHitlDetail({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); +/** +* Get Hitl Details +* Get Human-in-the-loop details. +* @returns HITLDetailCollection Successful Response +* @throws ApiError +*/ +export const useHumanInTheLoopServiceGetHitlDetailsSuspense = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseHumanInTheLoopServiceGetHitlDetailsKeyFn(queryKey), queryFn: () => HumanInTheLoopService.getHitlDetails() as TData, ...options }); +/** * Get Health * @returns HealthInfoResponse Successful Response * @throws ApiError @@ -1225,41 +1304,6 @@ export const useStructureServiceStructureDataSuspense = , "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }, queryKey), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) as TData, ...options }); /** -* Grid Data -* Return grid data. -* @param data The data for the request. -* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.offset -* @param data.runType -* @param data.state -* @param data.limit -* @param data.orderBy -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @returns GridResponse Successful Response -* @throws ApiError -*/ -export const useGridServiceGridDataSuspense = = unknown[]>({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseGridServiceGridDataKeyFn({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }, queryKey), queryFn: () => GridService.gridData({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }) as TData, ...options }); -/** * Get Dag Structure * Return dag structure for grid view. * @param data The data for the request. 
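A hedged sketch of listing pending Human-in-the-loop prompts with the suspense hook above. The field names (`hitl_details`, `ti_id`, `subject`, `options`, `response_received`) follow the `$HITLDetail` schemas added below in schemas.gen.ts; the import path is an assumption:

```tsx
// Hedged sketch: show only prompts still waiting for a human response.
import { useHumanInTheLoopServiceGetHitlDetailsSuspense } from "openapi/queries/suspense"; // path alias is an assumption

export const HitlInbox = () => {
  const { data } = useHumanInTheLoopServiceGetHitlDetailsSuspense();

  const pending = data.hitl_details.filter((detail) => !detail.response_received);

  return (
    <ul>
      {pending.map((detail) => (
        <li key={detail.ti_id}>
          {detail.subject} ({detail.options.join(", ")})
        </li>
      ))}
    </ul>
  );
};
```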
@@ -1338,3 +1382,20 @@ export const useGridServiceGetGridTiSummariesSuspense = = unknown[]>({ dagId }: { dagId: string; }, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseGridServiceGetLatestRunKeyFn({ dagId }, queryKey), queryFn: () => GridService.getLatestRun({ dagId }) as TData, ...options }); +/** +* Get Calendar +* Get calendar data for a DAG including historical and planned DAG runs. +* @param data The data for the request. +* @param data.dagId +* @param data.granularity +* @param data.logicalDateGte +* @param data.logicalDateLte +* @returns CalendarTimeRangeCollectionResponse Successful Response +* @throws ApiError +*/ +export const useCalendarServiceGetCalendarSuspense = = unknown[]>({ dagId, granularity, logicalDateGte, logicalDateLte }: { + dagId: string; + granularity?: "hourly" | "daily"; + logicalDateGte?: string; + logicalDateLte?: string; +}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseCalendarServiceGetCalendarKeyFn({ dagId, granularity, logicalDateGte, logicalDateLte }, queryKey), queryFn: () => CalendarService.getCalendar({ dagId, granularity, logicalDateGte, logicalDateLte }) as TData, ...options }); diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts index a6202ec4e4c8b..2e31d61df5ae1 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -3408,6 +3408,162 @@ export const $FastAPIRootMiddlewareResponse = { description: 'Serializer for Plugin FastAPI root middleware responses.' } as const; +export const $HITLDetail = { + properties: { + ti_id: { + type: 'string', + title: 'Ti Id' + }, + options: { + items: { + type: 'string' + }, + type: 'array', + title: 'Options' + }, + subject: { + type: 'string', + title: 'Subject' + }, + body: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'Body' + }, + defaults: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Defaults' + }, + multiple: { + type: 'boolean', + title: 'Multiple', + default: false + }, + params: { + additionalProperties: true, + type: 'object', + title: 'Params' + }, + user_id: { + anyOf: [ + { + type: 'string' + }, + { + type: 'null' + } + ], + title: 'User Id' + }, + response_at: { + anyOf: [ + { + type: 'string', + format: 'date-time' + }, + { + type: 'null' + } + ], + title: 'Response At' + }, + chosen_options: { + anyOf: [ + { + items: { + type: 'string' + }, + type: 'array' + }, + { + type: 'null' + } + ], + title: 'Chosen Options' + }, + params_input: { + additionalProperties: true, + type: 'object', + title: 'Params Input' + }, + response_received: { + type: 'boolean', + title: 'Response Received', + default: false + } + }, + type: 'object', + required: ['ti_id', 'options', 'subject'], + title: 'HITLDetail', + description: 'Schema for Human-in-the-loop detail.' +} as const; + +export const $HITLDetailCollection = { + properties: { + hitl_details: { + items: { + '$ref': '#/components/schemas/HITLDetail' + }, + type: 'array', + title: 'Hitl Details' + }, + total_entries: { + type: 'integer', + title: 'Total Entries' + } + }, + type: 'object', + required: ['hitl_details', 'total_entries'], + title: 'HITLDetailCollection', + description: 'Schema for a collection of Human-in-the-loop details.' 
+} as const; + +export const $HITLDetailResponse = { + properties: { + user_id: { + type: 'string', + title: 'User Id' + }, + response_at: { + type: 'string', + format: 'date-time', + title: 'Response At' + }, + chosen_options: { + items: { + type: 'string' + }, + type: 'array', + title: 'Chosen Options' + }, + params_input: { + additionalProperties: true, + type: 'object', + title: 'Params Input' + } + }, + type: 'object', + required: ['user_id', 'response_at', 'chosen_options'], + title: 'HITLDetailResponse', + description: 'Response of updating a Human-in-the-loop detail.' +} as const; + export const $HTTPExceptionResponse = { properties: { detail: { @@ -4641,6 +4797,9 @@ export const $TaskInstanceResponse = { type: 'string', title: 'Dag Id' }, + dag_version: { + '$ref': '#/components/schemas/DagVersionResponse' + }, dag_run_id: { type: 'string', title: 'Dag Run Id' @@ -4886,20 +5045,10 @@ export const $TaskInstanceResponse = { type: 'null' } ] - }, - dag_version: { - anyOf: [ - { - '$ref': '#/components/schemas/DagVersionResponse' - }, - { - type: 'null' - } - ] } }, type: 'object', - required: ['id', 'task_id', 'dag_id', 'dag_run_id', 'map_index', 'logical_date', 'run_after', 'start_date', 'end_date', 'duration', 'state', 'try_number', 'max_tries', 'task_display_name', 'dag_display_name', 'hostname', 'unixname', 'pool', 'pool_slots', 'queue', 'priority_weight', 'operator', 'queued_when', 'scheduled_when', 'pid', 'executor', 'executor_config', 'note', 'rendered_map_index', 'trigger', 'triggerer_job', 'dag_version'], + required: ['id', 'task_id', 'dag_id', 'dag_version', 'dag_run_id', 'map_index', 'logical_date', 'run_after', 'start_date', 'end_date', 'duration', 'state', 'try_number', 'max_tries', 'task_display_name', 'dag_display_name', 'hostname', 'unixname', 'pool', 'pool_slots', 'queue', 'priority_weight', 'operator', 'queued_when', 'scheduled_when', 'pid', 'executor', 'executor_config', 'note', 'rendered_map_index', 'trigger', 'triggerer_job'], title: 'TaskInstanceResponse', description: 'TaskInstance serializer for responses.' } as const; @@ -5703,6 +5852,27 @@ export const $TriggererInfoResponse = { description: 'Triggerer info serializer for responses.' } as const; +export const $UpdateHITLDetailPayload = { + properties: { + chosen_options: { + items: { + type: 'string' + }, + type: 'array', + title: 'Chosen Options' + }, + params_input: { + additionalProperties: true, + type: 'object', + title: 'Params Input' + } + }, + type: 'object', + required: ['chosen_options'], + title: 'UpdateHITLDetailPayload', + description: 'Schema for updating the content of a Human-in-the-loop detail.' +} as const; + export const $ValidationError = { properties: { loc: { @@ -6124,6 +6294,49 @@ export const $BaseNodeResponse = { description: 'Base Node serializer for responses.' } as const; +export const $CalendarTimeRangeCollectionResponse = { + properties: { + total_entries: { + type: 'integer', + title: 'Total Entries' + }, + dag_runs: { + items: { + '$ref': '#/components/schemas/CalendarTimeRangeResponse' + }, + type: 'array', + title: 'Dag Runs' + } + }, + type: 'object', + required: ['total_entries', 'dag_runs'], + title: 'CalendarTimeRangeCollectionResponse', + description: 'Response model for calendar time range results.' 
+} as const; + +export const $CalendarTimeRangeResponse = { + properties: { + date: { + type: 'string', + format: 'date-time', + title: 'Date' + }, + state: { + type: 'string', + enum: ['queued', 'running', 'success', 'failed', 'planned'], + title: 'State' + }, + count: { + type: 'integer', + title: 'Count' + } + }, + type: 'object', + required: ['date', 'state', 'count'], + title: 'CalendarTimeRangeResponse', + description: 'Represents a summary of DAG runs for a specific calendar time range.' +} as const; + export const $ConfigResponse = { properties: { page_size: { @@ -6704,120 +6917,6 @@ export const $ExtraMenuItem = { title: 'ExtraMenuItem' } as const; -export const $GridDAGRunwithTIs = { - properties: { - dag_run_id: { - type: 'string', - title: 'Dag Run Id' - }, - queued_at: { - anyOf: [ - { - type: 'string', - format: 'date-time' - }, - { - type: 'null' - } - ], - title: 'Queued At' - }, - start_date: { - anyOf: [ - { - type: 'string', - format: 'date-time' - }, - { - type: 'null' - } - ], - title: 'Start Date' - }, - end_date: { - anyOf: [ - { - type: 'string', - format: 'date-time' - }, - { - type: 'null' - } - ], - title: 'End Date' - }, - run_after: { - type: 'string', - format: 'date-time', - title: 'Run After' - }, - state: { - '$ref': '#/components/schemas/DagRunState' - }, - run_type: { - '$ref': '#/components/schemas/DagRunType' - }, - logical_date: { - anyOf: [ - { - type: 'string', - format: 'date-time' - }, - { - type: 'null' - } - ], - title: 'Logical Date' - }, - data_interval_start: { - anyOf: [ - { - type: 'string', - format: 'date-time' - }, - { - type: 'null' - } - ], - title: 'Data Interval Start' - }, - data_interval_end: { - anyOf: [ - { - type: 'string', - format: 'date-time' - }, - { - type: 'null' - } - ], - title: 'Data Interval End' - }, - note: { - anyOf: [ - { - type: 'string' - }, - { - type: 'null' - } - ], - title: 'Note' - }, - task_instances: { - items: { - '$ref': '#/components/schemas/GridTaskInstanceSummary' - }, - type: 'array', - title: 'Task Instances' - } - }, - type: 'object', - required: ['dag_run_id', 'queued_at', 'start_date', 'end_date', 'run_after', 'state', 'run_type', 'logical_date', 'data_interval_start', 'data_interval_end', 'note', 'task_instances'], - title: 'GridDAGRunwithTIs', - description: 'DAG Run model for the Grid UI.' -} as const; - export const $GridNodeResponse = { properties: { id: { @@ -6872,22 +6971,6 @@ export const $GridNodeResponse = { description: 'Base Node serializer for responses.' } as const; -export const $GridResponse = { - properties: { - dag_runs: { - items: { - '$ref': '#/components/schemas/GridDAGRunwithTIs' - }, - type: 'array', - title: 'Dag Runs' - } - }, - type: 'object', - required: ['dag_runs'], - title: 'GridResponse', - description: 'Response model for the Grid UI.' -} as const; - export const $GridRunsResponse = { properties: { dag_id: { @@ -6953,14 +7036,7 @@ export const $GridRunsResponse = { '$ref': '#/components/schemas/DagRunType' }, duration: { - anyOf: [ - { - type: 'integer' - }, - { - type: 'null' - } - ], + type: 'integer', title: 'Duration', readOnly: true } @@ -6995,98 +7071,6 @@ export const $GridTISummaries = { description: 'DAG Run model for the Grid UI.' 
} as const; -export const $GridTaskInstanceSummary = { - properties: { - task_id: { - type: 'string', - title: 'Task Id' - }, - try_number: { - type: 'integer', - title: 'Try Number' - }, - start_date: { - anyOf: [ - { - type: 'string', - format: 'date-time' - }, - { - type: 'null' - } - ], - title: 'Start Date' - }, - end_date: { - anyOf: [ - { - type: 'string', - format: 'date-time' - }, - { - type: 'null' - } - ], - title: 'End Date' - }, - queued_dttm: { - anyOf: [ - { - type: 'string', - format: 'date-time' - }, - { - type: 'null' - } - ], - title: 'Queued Dttm' - }, - child_states: { - anyOf: [ - { - additionalProperties: { - type: 'integer' - }, - type: 'object' - }, - { - type: 'null' - } - ], - title: 'Child States' - }, - task_count: { - type: 'integer', - title: 'Task Count' - }, - state: { - anyOf: [ - { - '$ref': '#/components/schemas/TaskInstanceState' - }, - { - type: 'null' - } - ] - }, - note: { - anyOf: [ - { - type: 'string' - }, - { - type: 'null' - } - ], - title: 'Note' - } - }, - type: 'object', - required: ['task_id', 'try_number', 'start_date', 'end_date', 'queued_dttm', 'child_states', 'task_count', 'state', 'note'], - title: 'GridTaskInstanceSummary', - description: 'Task Instance Summary model for the Grid UI.' -} as const; - export const $HistoricalMetricDataResponse = { properties: { dag_run_types: { @@ -7153,9 +7137,6 @@ export const $LightGridTaskInstanceSummary = { additionalProperties: { type: 'integer' }, - propertyNames: { - '$ref': '#/components/schemas/TaskInstanceState' - }, type: 'object' }, { diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts index 02acbbca34361..b935e2042366d 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/services.gen.ts @@ -3,7 +3,7 @@ import type { CancelablePromise } from './core/CancelablePromise'; import { OpenAPI } from './core/OpenAPI'; import { request as __request } from './core/request'; -import type { GetAssetsData, GetAssetsResponse, GetAssetAliasesData, GetAssetAliasesResponse, GetAssetAliasData, GetAssetAliasResponse, GetAssetEventsData, GetAssetEventsResponse, CreateAssetEventData, CreateAssetEventResponse, MaterializeAssetData, MaterializeAssetResponse, GetAssetQueuedEventsData, GetAssetQueuedEventsResponse, DeleteAssetQueuedEventsData, DeleteAssetQueuedEventsResponse, GetAssetData, GetAssetResponse, GetDagAssetQueuedEventsData, GetDagAssetQueuedEventsResponse, DeleteDagAssetQueuedEventsData, DeleteDagAssetQueuedEventsResponse, GetDagAssetQueuedEventData, GetDagAssetQueuedEventResponse, DeleteDagAssetQueuedEventData, DeleteDagAssetQueuedEventResponse, NextRunAssetsData, NextRunAssetsResponse, ListBackfillsData, ListBackfillsResponse, CreateBackfillData, CreateBackfillResponse, GetBackfillData, GetBackfillResponse, PauseBackfillData, PauseBackfillResponse, UnpauseBackfillData, UnpauseBackfillResponse, CancelBackfillData, CancelBackfillResponse, CreateBackfillDryRunData, CreateBackfillDryRunResponse, ListBackfillsUiData, ListBackfillsUiResponse, DeleteConnectionData, DeleteConnectionResponse, GetConnectionData, GetConnectionResponse, PatchConnectionData, PatchConnectionResponse, GetConnectionsData, GetConnectionsResponse, PostConnectionData, PostConnectionResponse, BulkConnectionsData, BulkConnectionsResponse, TestConnectionData, TestConnectionResponse, CreateDefaultConnectionsResponse, HookMetaDataResponse, GetDagRunData, GetDagRunResponse, 
DeleteDagRunData, DeleteDagRunResponse, PatchDagRunData, PatchDagRunResponse, GetUpstreamAssetEventsData, GetUpstreamAssetEventsResponse, ClearDagRunData, ClearDagRunResponse, GetDagRunsData, GetDagRunsResponse, TriggerDagRunData, TriggerDagRunResponse, GetListDagRunsBatchData, GetListDagRunsBatchResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, GetDagStatsResponse, GetDagReportsData, GetDagReportsResponse, GetConfigData, GetConfigResponse, GetConfigValueData, GetConfigValueResponse, GetConfigsResponse, ListDagWarningsData, ListDagWarningsResponse, GetDagsData, GetDagsResponse, PatchDagsData, PatchDagsResponse, GetDagData, GetDagResponse, PatchDagData, PatchDagResponse, DeleteDagData, DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, GetDagTagsData, GetDagTagsResponse, GetDagsUiData, GetDagsUiResponse, GetEventLogData, GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, GetExtraLinksData, GetExtraLinksResponse, GetTaskInstanceData, GetTaskInstanceResponse, PatchTaskInstanceData, PatchTaskInstanceResponse, DeleteTaskInstanceData, DeleteTaskInstanceResponse, GetMappedTaskInstancesData, GetMappedTaskInstancesResponse, GetTaskInstanceDependenciesByMapIndexData, GetTaskInstanceDependenciesByMapIndexResponse, GetTaskInstanceDependenciesData, GetTaskInstanceDependenciesResponse, GetTaskInstanceTriesData, GetTaskInstanceTriesResponse, GetMappedTaskInstanceTriesData, GetMappedTaskInstanceTriesResponse, GetMappedTaskInstanceData, GetMappedTaskInstanceResponse, PatchTaskInstanceByMapIndexData, PatchTaskInstanceByMapIndexResponse, GetTaskInstancesData, GetTaskInstancesResponse, BulkTaskInstancesData, BulkTaskInstancesResponse, GetTaskInstancesBatchData, GetTaskInstancesBatchResponse, GetTaskInstanceTryDetailsData, GetTaskInstanceTryDetailsResponse, GetMappedTaskInstanceTryDetailsData, GetMappedTaskInstanceTryDetailsResponse, PostClearTaskInstancesData, PostClearTaskInstancesResponse, PatchTaskInstanceDryRunByMapIndexData, PatchTaskInstanceDryRunByMapIndexResponse, PatchTaskInstanceDryRunData, PatchTaskInstanceDryRunResponse, GetLogData, GetLogResponse, GetExternalLogUrlData, GetExternalLogUrlResponse, GetImportErrorData, GetImportErrorResponse, GetImportErrorsData, GetImportErrorsResponse, GetJobsData, GetJobsResponse, GetPluginsData, GetPluginsResponse, ImportErrorsResponse, DeletePoolData, DeletePoolResponse, GetPoolData, GetPoolResponse, PatchPoolData, PatchPoolResponse, GetPoolsData, GetPoolsResponse, PostPoolData, PostPoolResponse, BulkPoolsData, BulkPoolsResponse, GetProvidersData, GetProvidersResponse, GetXcomEntryData, GetXcomEntryResponse, UpdateXcomEntryData, UpdateXcomEntryResponse, GetXcomEntriesData, GetXcomEntriesResponse, CreateXcomEntryData, CreateXcomEntryResponse, GetTasksData, GetTasksResponse, GetTaskData, GetTaskResponse, DeleteVariableData, DeleteVariableResponse, GetVariableData, GetVariableResponse, PatchVariableData, PatchVariableResponse, GetVariablesData, GetVariablesResponse, PostVariableData, PostVariableResponse, BulkVariablesData, BulkVariablesResponse, ReparseDagFileData, ReparseDagFileResponse, GetDagVersionData, GetDagVersionResponse, GetDagVersionsData, GetDagVersionsResponse, GetHealthResponse, GetVersionResponse, LoginData, LoginResponse, LogoutData, LogoutResponse, GetAuthMenusResponse, GetDependenciesData, GetDependenciesResponse, HistoricalMetricsData, HistoricalMetricsResponse, DagStatsResponse2, StructureDataData, StructureDataResponse2, GridDataData, GridDataResponse, GetDagStructureData, GetDagStructureResponse, 
GetGridRunsData, GetGridRunsResponse, GetGridTiSummariesData, GetGridTiSummariesResponse, GetLatestRunData, GetLatestRunResponse } from './types.gen'; +import type { GetAssetsData, GetAssetsResponse, GetAssetAliasesData, GetAssetAliasesResponse, GetAssetAliasData, GetAssetAliasResponse, GetAssetEventsData, GetAssetEventsResponse, CreateAssetEventData, CreateAssetEventResponse, MaterializeAssetData, MaterializeAssetResponse, GetAssetQueuedEventsData, GetAssetQueuedEventsResponse, DeleteAssetQueuedEventsData, DeleteAssetQueuedEventsResponse, GetAssetData, GetAssetResponse, GetDagAssetQueuedEventsData, GetDagAssetQueuedEventsResponse, DeleteDagAssetQueuedEventsData, DeleteDagAssetQueuedEventsResponse, GetDagAssetQueuedEventData, GetDagAssetQueuedEventResponse, DeleteDagAssetQueuedEventData, DeleteDagAssetQueuedEventResponse, NextRunAssetsData, NextRunAssetsResponse, ListBackfillsData, ListBackfillsResponse, CreateBackfillData, CreateBackfillResponse, GetBackfillData, GetBackfillResponse, PauseBackfillData, PauseBackfillResponse, UnpauseBackfillData, UnpauseBackfillResponse, CancelBackfillData, CancelBackfillResponse, CreateBackfillDryRunData, CreateBackfillDryRunResponse, ListBackfillsUiData, ListBackfillsUiResponse, DeleteConnectionData, DeleteConnectionResponse, GetConnectionData, GetConnectionResponse, PatchConnectionData, PatchConnectionResponse, GetConnectionsData, GetConnectionsResponse, PostConnectionData, PostConnectionResponse, BulkConnectionsData, BulkConnectionsResponse, TestConnectionData, TestConnectionResponse, CreateDefaultConnectionsResponse, HookMetaDataResponse, GetDagRunData, GetDagRunResponse, DeleteDagRunData, DeleteDagRunResponse, PatchDagRunData, PatchDagRunResponse, GetUpstreamAssetEventsData, GetUpstreamAssetEventsResponse, ClearDagRunData, ClearDagRunResponse, GetDagRunsData, GetDagRunsResponse, TriggerDagRunData, TriggerDagRunResponse, WaitDagRunUntilFinishedData, WaitDagRunUntilFinishedResponse, GetListDagRunsBatchData, GetListDagRunsBatchResponse, GetDagSourceData, GetDagSourceResponse, GetDagStatsData, GetDagStatsResponse, GetDagReportsData, GetDagReportsResponse, GetConfigData, GetConfigResponse, GetConfigValueData, GetConfigValueResponse, GetConfigsResponse, ListDagWarningsData, ListDagWarningsResponse, GetDagsData, GetDagsResponse, PatchDagsData, PatchDagsResponse, GetDagData, GetDagResponse, PatchDagData, PatchDagResponse, DeleteDagData, DeleteDagResponse, GetDagDetailsData, GetDagDetailsResponse, FavoriteDagData, FavoriteDagResponse, UnfavoriteDagData, UnfavoriteDagResponse, GetDagTagsData, GetDagTagsResponse, GetDagsUiData, GetDagsUiResponse, GetEventLogData, GetEventLogResponse, GetEventLogsData, GetEventLogsResponse, GetExtraLinksData, GetExtraLinksResponse, GetTaskInstanceData, GetTaskInstanceResponse, PatchTaskInstanceData, PatchTaskInstanceResponse, DeleteTaskInstanceData, DeleteTaskInstanceResponse, GetMappedTaskInstancesData, GetMappedTaskInstancesResponse, GetTaskInstanceDependenciesByMapIndexData, GetTaskInstanceDependenciesByMapIndexResponse, GetTaskInstanceDependenciesData, GetTaskInstanceDependenciesResponse, GetTaskInstanceTriesData, GetTaskInstanceTriesResponse, GetMappedTaskInstanceTriesData, GetMappedTaskInstanceTriesResponse, GetMappedTaskInstanceData, GetMappedTaskInstanceResponse, PatchTaskInstanceByMapIndexData, PatchTaskInstanceByMapIndexResponse, GetTaskInstancesData, GetTaskInstancesResponse, BulkTaskInstancesData, BulkTaskInstancesResponse, GetTaskInstancesBatchData, GetTaskInstancesBatchResponse, GetTaskInstanceTryDetailsData, 
GetTaskInstanceTryDetailsResponse, GetMappedTaskInstanceTryDetailsData, GetMappedTaskInstanceTryDetailsResponse, PostClearTaskInstancesData, PostClearTaskInstancesResponse, PatchTaskInstanceDryRunByMapIndexData, PatchTaskInstanceDryRunByMapIndexResponse, PatchTaskInstanceDryRunData, PatchTaskInstanceDryRunResponse, GetLogData, GetLogResponse, GetExternalLogUrlData, GetExternalLogUrlResponse, GetImportErrorData, GetImportErrorResponse, GetImportErrorsData, GetImportErrorsResponse, GetJobsData, GetJobsResponse, GetPluginsData, GetPluginsResponse, ImportErrorsResponse, DeletePoolData, DeletePoolResponse, GetPoolData, GetPoolResponse, PatchPoolData, PatchPoolResponse, GetPoolsData, GetPoolsResponse, PostPoolData, PostPoolResponse, BulkPoolsData, BulkPoolsResponse, GetProvidersData, GetProvidersResponse, GetXcomEntryData, GetXcomEntryResponse, UpdateXcomEntryData, UpdateXcomEntryResponse, GetXcomEntriesData, GetXcomEntriesResponse, CreateXcomEntryData, CreateXcomEntryResponse, GetTasksData, GetTasksResponse, GetTaskData, GetTaskResponse, DeleteVariableData, DeleteVariableResponse, GetVariableData, GetVariableResponse, PatchVariableData, PatchVariableResponse, GetVariablesData, GetVariablesResponse, PostVariableData, PostVariableResponse, BulkVariablesData, BulkVariablesResponse, ReparseDagFileData, ReparseDagFileResponse, GetDagVersionData, GetDagVersionResponse, GetDagVersionsData, GetDagVersionsResponse, UpdateHitlDetailData, UpdateHitlDetailResponse, GetHitlDetailData, GetHitlDetailResponse, UpdateMappedTiHitlDetailData, UpdateMappedTiHitlDetailResponse, GetMappedTiHitlDetailData, GetMappedTiHitlDetailResponse, GetHitlDetailsResponse, GetHealthResponse, GetVersionResponse, LoginData, LoginResponse, LogoutData, LogoutResponse, GetAuthMenusResponse, GetDependenciesData, GetDependenciesResponse, HistoricalMetricsData, HistoricalMetricsResponse, DagStatsResponse2, StructureDataData, StructureDataResponse2, GetDagStructureData, GetDagStructureResponse, GetGridRunsData, GetGridRunsResponse, GetGridTiSummariesData, GetGridTiSummariesResponse, GetLatestRunData, GetLatestRunResponse, GetCalendarData, GetCalendarResponse } from './types.gen'; export class AssetService { /** @@ -983,6 +983,7 @@ export class DagRunService { * @param data.runType * @param data.state * @param data.orderBy + * @param data.runIdPattern SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. * @returns DAGRunCollectionResponse Successful Response * @throws ApiError */ @@ -1008,7 +1009,8 @@ export class DagRunService { updated_at_lte: data.updatedAtLte, run_type: data.runType, state: data.state, - order_by: data.orderBy + order_by: data.orderBy, + run_id_pattern: data.runIdPattern }, errors: { 401: 'Unauthorized', @@ -1048,6 +1050,38 @@ export class DagRunService { }); } + /** + * Experimental: Wait for a dag run to complete, and return task results if requested. + * 🚧 This is an experimental endpoint and may change or be removed without notice. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.interval Seconds to wait between dag run state checks + * @param data.result Collect result XCom from task. Can be set multiple times. 
+ * @returns unknown Successful Response + * @throws ApiError + */ + public static waitDagRunUntilFinished(data: WaitDagRunUntilFinishedData): CancelablePromise<WaitDagRunUntilFinishedResponse> { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/wait', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + query: { + interval: data.interval, + result: data.result + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + /** * Get List Dag Runs Batch * Get a list of DAG Runs. @@ -1077,6 +1111,41 @@ export class DagRunService { } +export class ExperimentalService { + /** + * Experimental: Wait for a dag run to complete, and return task results if requested. + * 🚧 This is an experimental endpoint and may change or be removed without notice. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.interval Seconds to wait between dag run state checks + * @param data.result Collect result XCom from task. Can be set multiple times. + * @returns unknown Successful Response + * @throws ApiError + */ + public static waitDagRunUntilFinished(data: WaitDagRunUntilFinishedData): CancelablePromise<WaitDagRunUntilFinishedResponse> { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/wait', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId + }, + query: { + interval: data.interval, + result: data.result + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + +} + export class DagSourceService { /** * Get Dag Source @@ -1301,6 +1370,7 @@ export class DagService { * @param data.dagRunEndDateLte * @param data.dagRunState * @param data.orderBy + * @param data.isFavorite * @returns DAGCollectionResponse Successful Response * @throws ApiError */ @@ -1324,7 +1394,8 @@ dag_run_end_date_gte: data.dagRunEndDateGte, dag_run_end_date_lte: data.dagRunEndDateLte, dag_run_state: data.dagRunState, - order_by: data.orderBy + order_by: data.orderBy, + is_favorite: data.isFavorite }, errors: { 401: 'Unauthorized', @@ -1485,6 +1556,55 @@ }); } + /** + * Favorite Dag + * Mark the DAG as favorite. + * @param data The data for the request. + * @param data.dagId + * @returns void Successful Response + * @throws ApiError + */ + public static favoriteDag(data: FavoriteDagData): CancelablePromise<FavoriteDagResponse> { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/dags/{dag_id}/favorite', + path: { + dag_id: data.dagId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Unfavorite Dag + * Unmark the DAG as favorite. + * @param data The data for the request. + * @param data.dagId + * @returns void Successful Response + * @throws ApiError + */ + public static unfavoriteDag(data: UnfavoriteDagData): CancelablePromise<UnfavoriteDagResponse> { + return __request(OpenAPI, { + method: 'POST', + url: '/api/v2/dags/{dag_id}/unfavorite', + path: { + dag_id: data.dagId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + /** * Get Dag Tags * Get all DAG tags.
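Editor's note, not part of the generated file: the wait endpoint above is generated on both DagRunService and the new ExperimentalService, presumably because the operation carries both OpenAPI tags. A minimal caller-side sketch of the new methods follows, assuming the client (OpenAPI.BASE, auth) is configured elsewhere; the DAG id, run id, and task id are hypothetical and the import path is abbreviated.

// Editor's sketch of the new endpoints; identifiers below are hypothetical.
import { DagRunService, DagService } from './services.gen';

async function demo(): Promise<void> {
  // Poll the run every 5 seconds until it reaches a terminal state,
  // collecting the XCom return value of one task when it finishes.
  const outcome = await DagRunService.waitDagRunUntilFinished({
    dagId: 'example_dag',
    dagRunId: 'manual__2025-01-01',
    interval: 5,
    result: ['load_task'],
  });
  console.log(outcome);

  // Star the DAG so it is returned by the new is_favorite filters.
  await DagService.favoriteDag({ dagId: 'example_dag' });

  // Filter runs by a SQL LIKE pattern on run_id (the new run_id_pattern
  // query parameter); % and _ are wildcards, regexes are not supported.
  const runs = await DagRunService.getDagRuns({
    dagId: 'example_dag',
    runIdPattern: 'manual__%',
  });
  console.log(runs.total_entries);
}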
@@ -1531,6 +1651,7 @@ export class DagService { * @param data.paused * @param data.lastDagRunState * @param data.orderBy + * @param data.isFavorite * @returns DAGWithLatestDagRunsCollectionResponse Successful Response * @throws ApiError */ @@ -1551,7 +1672,8 @@ exclude_stale: data.excludeStale, paused: data.paused, last_dag_run_state: data.lastDagRunState, - order_by: data.orderBy + order_by: data.orderBy, + is_favorite: data.isFavorite }, errors: { 422: 'Validation Error' @@ -3238,6 +3360,150 @@ export class DagVersionService { } +export class HumanInTheLoopService { + /** + * Update Hitl Detail + * Update a Human-in-the-loop detail. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.requestBody + * @returns HITLDetailResponse Successful Response + * @throws ApiError + */ + public static updateHitlDetail(data: UpdateHitlDetailData): CancelablePromise<UpdateHitlDetailResponse> { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Get Hitl Detail + * Get a Human-in-the-loop detail of a specific task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns HITLDetail Successful Response + * @throws ApiError + */ + public static getHitlDetail(data: GetHitlDetailData): CancelablePromise<GetHitlDetailResponse> { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Update Mapped Ti Hitl Detail + * Update a Human-in-the-loop detail. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @param data.requestBody + * @returns HITLDetailResponse Successful Response + * @throws ApiError + */ + public static updateMappedTiHitlDetail(data: UpdateMappedTiHitlDetailData): CancelablePromise<UpdateMappedTiHitlDetailResponse> { + return __request(OpenAPI, { + method: 'PATCH', + url: '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}/{map_index}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex + }, + body: data.requestBody, + mediaType: 'application/json', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 409: 'Conflict', + 422: 'Validation Error' + } + }); + } + + /** + * Get Mapped Ti Hitl Detail + * Get a Human-in-the-loop detail of a specific task instance. + * @param data The data for the request.
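+ * (Editor's illustration, not generator output: a mapped task instance is
+ * addressed like the un-mapped variant plus its map index, e.g.
+ * HumanInTheLoopService.getMappedTiHitlDetail({ dagId: 'example_dag',
+ * dagRunId: 'run_1', taskId: 'approve', mapIndex: 0 }); all four
+ * identifiers here are hypothetical.)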
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns HITLDetail Successful Response + * @throws ApiError + */ + public static getMappedTiHitlDetail(data: GetMappedTiHitlDetailData): CancelablePromise<GetMappedTiHitlDetailResponse> { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}/{map_index}', + path: { + dag_id: data.dagId, + dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex + }, + errors: { + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 422: 'Validation Error' + } + }); + } + + /** + * Get Hitl Details + * Get Human-in-the-loop details. + * @returns HITLDetailCollection Successful Response + * @throws ApiError + */ + public static getHitlDetails(): CancelablePromise<GetHitlDetailsResponse> { + return __request(OpenAPI, { + method: 'GET', + url: '/api/v2/hitl-details/', + errors: { + 401: 'Unauthorized', + 403: 'Forbidden' + } + }); + } + +} + export class MonitorService { /** * Get Health @@ -3432,55 +3698,6 @@ export class StructureService { } export class GridService { - /** - * Grid Data - * Return grid data. - * @param data The data for the request. - * @param data.dagId - * @param data.includeUpstream - * @param data.includeDownstream - * @param data.root - * @param data.offset - * @param data.runType - * @param data.state - * @param data.limit - * @param data.orderBy - * @param data.runAfterGte - * @param data.runAfterLte - * @param data.logicalDateGte - * @param data.logicalDateLte - * @returns GridResponse Successful Response - * @throws ApiError - */ - public static gridData(data: GridDataData): CancelablePromise<GridDataResponse> { - return __request(OpenAPI, { - method: 'GET', - url: '/ui/grid/{dag_id}', - path: { - dag_id: data.dagId - }, - query: { - include_upstream: data.includeUpstream, - include_downstream: data.includeDownstream, - root: data.root, - offset: data.offset, - run_type: data.runType, - state: data.state, - limit: data.limit, - order_by: data.orderBy, - run_after_gte: data.runAfterGte, - run_after_lte: data.runAfterLte, - logical_date_gte: data.logicalDateGte, - logical_date_lte: data.logicalDateLte - }, - errors: { - 400: 'Bad Request', - 404: 'Not Found', - 422: 'Validation Error' - } - }); - } - /** * Get Dag Structure * Return dag structure for grid view. @@ -3612,4 +3829,36 @@ }); } +} + +export class CalendarService { + /** + * Get Calendar + * Get calendar data for a DAG including historical and planned DAG runs. + * @param data The data for the request.
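+ * (Editor's illustration, not generator output: a daily calendar view for a
+ * hypothetical DAG could be fetched with
+ * CalendarService.getCalendar({ dagId: 'example_dag', granularity: 'daily' }),
+ * which resolves to a CalendarTimeRangeCollectionResponse of per-date
+ * state counts.)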
+ * @param data.dagId + * @param data.granularity + * @param data.logicalDateGte + * @param data.logicalDateLte + * @returns CalendarTimeRangeCollectionResponse Successful Response + * @throws ApiError + */ + public static getCalendar(data: GetCalendarData): CancelablePromise<GetCalendarResponse> { + return __request(OpenAPI, { + method: 'GET', + url: '/ui/calendar/{dag_id}', + path: { + dag_id: data.dagId + }, + query: { + granularity: data.granularity, + logical_date_gte: data.logicalDateGte, + logical_date_lte: data.logicalDateLte + }, + errors: { + 422: 'Validation Error' + } + }); + } + } \ No newline at end of file diff --git a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts index f28b965b25a53..591ce7884373c 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/requests/types.gen.ts @@ -917,6 +917,48 @@ export type FastAPIRootMiddlewareResponse = { [key: string]: unknown | string; }; +/** + * Schema for Human-in-the-loop detail. + */ +export type HITLDetail = { + ti_id: string; + options: Array<(string)>; + subject: string; + body?: string | null; + defaults?: Array<(string)> | null; + multiple?: boolean; + params?: { + [key: string]: unknown; + }; + user_id?: string | null; + response_at?: string | null; + chosen_options?: Array<(string)> | null; + params_input?: { + [key: string]: unknown; + }; + response_received?: boolean; +}; + +/** + * Schema for a collection of Human-in-the-loop details. + */ +export type HITLDetailCollection = { + hitl_details: Array<HITLDetail>; + total_entries: number; +}; + +/** + * Response of updating a Human-in-the-loop detail. + */ +export type HITLDetailResponse = { + user_id: string; + response_at: string; + chosen_options: Array<(string)>; + params_input?: { + [key: string]: unknown; + }; +}; + /** * HTTPException Model used for error response. */ @@ -1260,6 +1302,7 @@ export type TaskInstanceResponse = { id: string; task_id: string; dag_id: string; + dag_version: DagVersionResponse; dag_run_id: string; map_index: number; logical_date: string | null; @@ -1291,7 +1334,6 @@ }; trigger: TriggerResponse | null; triggerer_job: JobResponse | null; - dag_version: DagVersionResponse | null; }; /** @@ -1429,6 +1471,16 @@ export type TriggererInfoResponse = { latest_triggerer_heartbeat: string | null; }; +/** + * Schema for updating the content of a Human-in-the-loop detail. + */ +export type UpdateHITLDetailPayload = { + chosen_options: Array<(string)>; + params_input?: { + [key: string]: unknown; + }; +}; + export type ValidationError = { loc: Array<(string | number)>; msg: string; @@ -1566,6 +1618,25 @@ export type BaseNodeResponse = { export type type = 'join' | 'task' | 'asset-condition' | 'asset' | 'asset-alias' | 'asset-name-ref' | 'asset-uri-ref' | 'dag' | 'sensor' | 'trigger'; +/** + * Response model for calendar time range results. + */ +export type CalendarTimeRangeCollectionResponse = { + total_entries: number; + dag_runs: Array<CalendarTimeRangeResponse>; +}; + +/** + * Represents a summary of DAG runs for a specific calendar time range. + */ +export type CalendarTimeRangeResponse = { + date: string; + state: 'queued' | 'running' | 'success' | 'failed' | 'planned'; + count: number; +}; + +export type state = 'queued' | 'running' | 'success' | 'failed' | 'planned'; + /** * configuration serializer. */ @@ -1711,24 +1782,6 @@ export type ExtraMenuItem = { href: string; }; -/** - * DAG Run model for the Grid UI.
- */ -export type GridDAGRunwithTIs = { - dag_run_id: string; - queued_at: string | null; - start_date: string | null; - end_date: string | null; - run_after: string; - state: DagRunState; - run_type: DagRunType; - logical_date: string | null; - data_interval_start: string | null; - data_interval_end: string | null; - note: string | null; - task_instances: Array<GridTaskInstanceSummary>; -}; - /** * Base Node serializer for responses. */ @@ -1740,13 +1793,6 @@ export type GridNodeResponse = { setup_teardown_type?: 'setup' | 'teardown' | null; }; -/** - * Response model for the Grid UI. - */ -export type GridResponse = { - dag_runs: Array<GridDAGRunwithTIs>; -}; - /** * Base Node serializer for responses. */ @@ -1759,7 +1805,7 @@ export type GridRunsResponse = { run_after: string; state: TaskInstanceState | null; run_type: DagRunType; - readonly duration: number | null; + readonly duration: number; }; /** @@ -1771,23 +1817,6 @@ export type GridTISummaries = { task_instances: Array<LightGridTaskInstanceSummary>; }; -/** - * Task Instance Summary model for the Grid UI. - */ -export type GridTaskInstanceSummary = { - task_id: string; - try_number: number; - start_date: string | null; - end_date: string | null; - queued_dttm: string | null; - child_states: { - [key: string]: (number); -} | null; - task_count: number; - state: TaskInstanceState | null; - note: string | null; -}; - /** * Historical Metric Data serializer for responses. */ @@ -2176,6 +2205,10 @@ export type GetDagRunsData = { orderBy?: string; runAfterGte?: string | null; runAfterLte?: string | null; + /** + * SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. + */ + runIdPattern?: string | null; runType?: Array<(string)>; startDateGte?: string | null; startDateLte?: string | null; @@ -2193,6 +2226,21 @@ export type TriggerDagRunData = { export type TriggerDagRunResponse = DAGRunResponse; +export type WaitDagRunUntilFinishedData = { + dagId: string; + dagRunId: string; + /** + * Seconds to wait between dag run state checks + */ + interval: number; + /** + * Collect result XCom from task. Can be set multiple times.
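+ * (Editor's illustration: result: ['extract_task', 'load_task'] would collect
+ * the XCom return values of those two hypothetical task ids; leaving the
+ * field unset waits for the run without collecting any results.)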
+ */ + result?: Array<(string)> | null; +}; + +export type WaitDagRunUntilFinishedResponse = unknown; + export type GetListDagRunsBatchData = { dagId: "~"; requestBody: DAGRunsBatchBody; @@ -2262,6 +2310,7 @@ export type GetDagsData = { dagRunStartDateLte?: string | null; dagRunState?: Array<(string)>; excludeStale?: boolean; + isFavorite?: boolean | null; lastDagRunState?: DagRunState | null; limit?: number; offset?: number; @@ -2318,6 +2367,18 @@ export type GetDagDetailsData = { export type GetDagDetailsResponse = DAGDetailsResponse; +export type FavoriteDagData = { + dagId: string; +}; + +export type FavoriteDagResponse = void; + +export type UnfavoriteDagData = { + dagId: string; +}; + +export type UnfavoriteDagResponse = void; + export type GetDagTagsData = { limit?: number; offset?: number; @@ -2342,6 +2403,7 @@ export type GetDagsUiData = { dagIds?: Array<(string)> | null; dagRunsLimit?: number; excludeStale?: boolean; + isFavorite?: boolean | null; lastDagRunState?: DagRunState | null; limit?: number; offset?: number; @@ -2837,6 +2899,44 @@ export type GetDagVersionsData = { export type GetDagVersionsResponse = DAGVersionCollectionResponse; +export type UpdateHitlDetailData = { + dagId: string; + dagRunId: string; + requestBody: UpdateHITLDetailPayload; + taskId: string; +}; + +export type UpdateHitlDetailResponse = HITLDetailResponse; + +export type GetHitlDetailData = { + dagId: string; + dagRunId: string; + taskId: string; +}; + +export type GetHitlDetailResponse = HITLDetail; + +export type UpdateMappedTiHitlDetailData = { + dagId: string; + dagRunId: string; + mapIndex: number; + requestBody: UpdateHITLDetailPayload; + taskId: string; +}; + +export type UpdateMappedTiHitlDetailResponse = HITLDetailResponse; + +export type GetMappedTiHitlDetailData = { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}; + +export type GetMappedTiHitlDetailResponse = HITLDetail; + +export type GetHitlDetailsResponse = HITLDetailCollection; + export type GetHealthResponse = HealthInfoResponse; export type GetVersionResponse = VersionInfo; @@ -2881,24 +2981,6 @@ export type StructureDataData = { export type StructureDataResponse2 = StructureDataResponse; -export type GridDataData = { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string | null; - logicalDateLte?: string | null; - offset?: number; - orderBy?: string; - root?: string | null; - runAfterGte?: string | null; - runAfterLte?: string | null; - runType?: Array<(string)>; - state?: Array<(string)>; -}; - -export type GridDataResponse = GridResponse; - export type GetDagStructureData = { dagId: string; limit?: number; @@ -2934,6 +3016,15 @@ export type GetLatestRunData = { export type GetLatestRunResponse = LatestRunResponse | null; +export type GetCalendarData = { + dagId: string; + granularity?: 'hourly' | 'daily'; + logicalDateGte?: string | null; + logicalDateLte?: string | null; +}; + +export type GetCalendarResponse = CalendarTimeRangeCollectionResponse; + export type $OpenApiTs = { '/api/v2/assets': { get: { @@ -3933,6 +4024,33 @@ export type $OpenApiTs = { }; }; }; + '/api/v2/dags/{dag_id}/dagRuns/{dag_run_id}/wait': { + get: { + req: WaitDagRunUntilFinishedData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: 
HTTPValidationError; + }; + }; + }; '/api/v2/dags/{dag_id}/dagRuns/list': { post: { req: GetListDagRunsBatchData; @@ -4324,6 +4442,64 @@ export type $OpenApiTs = { }; }; }; + '/api/v2/dags/{dag_id}/favorite': { + post: { + req: FavoriteDagData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + '/api/v2/dags/{dag_id}/unfavorite': { + post: { + req: UnfavoriteDagData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; '/api/v2/dagTags': { get: { req: GetDagTagsData; @@ -5707,6 +5883,136 @@ export type $OpenApiTs = { }; }; }; + '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}': { + patch: { + req: UpdateHitlDetailData; + res: { + /** + * Successful Response + */ + 200: HITLDetailResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetHitlDetailData; + res: { + /** + * Successful Response + */ + 200: HITLDetail; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + '/api/v2/hitl-details/{dag_id}/{dag_run_id}/{task_id}/{map_index}': { + patch: { + req: UpdateMappedTiHitlDetailData; + res: { + /** + * Successful Response + */ + 200: HITLDetailResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetMappedTiHitlDetailData; + res: { + /** + * Successful Response + */ + 200: HITLDetail; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + '/api/v2/hitl-details/': { + get: { + res: { + /** + * Successful Response + */ + 200: HITLDetailCollection; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + }; + }; + }; '/api/v2/monitor/health': { get: { res: { @@ -5842,29 +6148,6 @@ export type $OpenApiTs = { }; }; }; - '/ui/grid/{dag_id}': { - get: { - req: GridDataData; - res: { - /** - * Successful Response - */ - 200: GridResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; - /** - * Not Found - */ - 404: HTTPExceptionResponse; - /** - * Validation Error - */ - 422: HTTPValidationError; - }; - }; - }; '/ui/grid/structure/{dag_id}': { get: { req: GetDagStructureData; @@ -5957,4 +6240,19 @@ export type $OpenApiTs = { }; }; }; + '/ui/calendar/{dag_id}': { + get: { + req: GetCalendarData; 
+ res: { + /** + * Successful Response + */ + 200: CalendarTimeRangeCollectionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; }; \ No newline at end of file diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/README.md b/airflow-core/src/airflow/ui/public/i18n/locales/de/README.md index c71952d7217c3..f0e0767e70d26 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/de/README.md +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/README.md @@ -46,6 +46,9 @@ Die folgenden Begriffe wurden bewusst nicht aus dem Englischen übersetzt: "Workflow" wäre vermutlich eher irreführend und es ist anzunehmen dass die Nutzer von Airflow den Begriff zuordnen können. Der Begriff `Dag` wird in der deutschen Übersetzung im Neutrum verwendet. +- Log level "CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG" in dag.json + Abschnitt "logs": Diese Begriffe werden in den feststehenden Logs im Text + auch ausgegeben, deswegen werden sie nicht in das Deutsche übertragen. (Derzeit keine weiteren feststehenden Begriffe) diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/assets.json index 23ad46c86ae3c..6804f32ffe598 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/de/assets.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/assets.json @@ -25,5 +25,6 @@ "lastAssetEvent": "Letztes Ereignis zu Datenset (Asset)", "name": "Name", "producingTasks": "Produzierende Tasks", + "scheduledDags": "Geplante Dags", "searchPlaceholder": "Datenset (Asset) suchen" } diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/common.json index bf8176e5760a2..3a254f64c63ae 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/de/common.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/common.json @@ -78,6 +78,11 @@ "notFound": "Seite nicht gefunden", "title": "Fehler" }, + "expand": { + "collapse": "Einklappen", + "expand": "Ausklappen", + "tooltip": "Tastenkombination {{hotkey}} zum Ein-/Ausklappen drücken" + }, "expression": { "all": "Alle", "and": "UND", @@ -103,6 +108,7 @@ "dags": "Dags", "docs": "Doku", "home": "Start", + "legacyFabViews": "Alte Ansichten", "plugins": "Plug-ins", "security": "Sicherheit" }, @@ -270,6 +276,7 @@ } } }, + "total": "Gesamt {{state}}", "triggered": "Angestoßen", "tryNumber": "Versuch Nummer", "user": "Benutzer", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json index 765fb85ae73bd..95abe63ea6367 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/components.json @@ -2,14 +2,17 @@ "backfill": { "affected_one": "1 Lauf wird ausgelöst.", "affected_other": "{{count}} Läufe werden ausgelöst.", - "affectedNone": "Keine Läufe entsprechend en Kriterien.", + "affectedNone": "Keine Läufe entsprechen den Kriterien.", + "allRuns": "Alle Läufe", "backwards": "Verarbeitung in Rückwärtiger Reihenfolge", "dateRange": "Datumsbereich", "dateRangeFrom": "Von", "dateRangeTo": "Bis", "errorStartDateBeforeEndDate": "Das Startdatum muss vor dem Enddatum liegen.", "maxRuns": "Anzahl aktiver paralleler Läufe", - "reprocessBehavior": "Aufffüll-Modus", + "missingAndErroredRuns": "Fehlende und fehlgeschlagene Läufe", + "missingRuns": "Fehlende Läufe", + "reprocessBehavior": "Auffüll-Modus", "run": 
"Auffüllung starten", "selectDescription": "Dieses Dag für einen Datumsbereich in der Vergangenheit ausführen", "selectLabel": "Auffüllen", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/dag.json index 3a6a8ce310426..98295a47cbc74 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/de/dag.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/dag.json @@ -25,12 +25,17 @@ "logs": { "allLevels": "Alle Protokoll-Stufen", "allSources": "Alle Quellen", + "critical": "CRITICAL", + "debug": "DEBUG", + "error": "ERROR", "fullscreen": { "button": "Vollbild", "tooltip": "Taste {{hotkey}} für Vollbildmodus" }, + "info": "INFO", "noTryNumber": "Keine Versuchsnummer", - "viewInExternal": "Protokoll in {{name}} (Versuch {{attempt}}) ansehen" + "viewInExternal": "Protokoll in {{name}} (Versuch {{attempt}}) ansehen", + "warning": "WARNING" }, "overview": { "buttons": { diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/dags.json index d79a944687da3..547d0d878f181 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/de/dags.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/dags.json @@ -6,14 +6,21 @@ "warning": "Diese Aktion löscht alle Metadaten zu diesem Dag mit allen Läufen und Task Instanzen." } }, + "favoriteDag": "Dag als Favorit hinzufügen", "filters": { "allRunTypes": "Alle Arten von Läufen", "allStates": "Alle Stati", + "favorite": { + "all": "Alle", + "favorite": "Favorisierte", + "unfavorite": "Nicht favorisierte" + }, "paused": { "active": "Aktiv", "all": "Alle", "paused": "Pausiert" - } + }, + "runIdPatternFilter": "Dag Läufe suchen" }, "ownerLink": "Besitzer Verlinkungen zu {{owner}}", "runAndTaskActions": { @@ -83,5 +90,6 @@ "desc": "Sortiert nach nächstem Laufdatum (Letzter-Erster)" }, "placeholder": "Sortieren nach" - } + }, + "unfavoriteDag": "Von den Favoriten entfernen" } diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/de/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/de/dashboard.json index dc53117a11b7e..73ae156e96e64 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/de/dashboard.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/de/dashboard.json @@ -1,4 +1,10 @@ { + "favorite": { + "favoriteDags_one": "Erster favorisierter DAG", + "favoriteDags_other": "Top {{count}} favorisierte DAGs", + "noDagRuns": "Noch kein Lauf für dieses Dag.", + "noFavoriteDags": "Noch keine favorisierten Dags. Mit dem Stern-Symbol neben einem Dag kann man das Dag zu den Favoriten hinzufügen." 
+ }, "group": "Gruppe", "health": { "dagProcessor": "Dag Prozessor", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/common.json index d437f984494cb..ff724bc488637 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/en/common.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/common.json @@ -81,6 +81,7 @@ "expand":{ "collapse": "Collapse", "expand": "Expand", + "hotkey": "e", "tooltip": "Press {{hotkey}} to toggle expand" }, "expression": { @@ -153,6 +154,11 @@ }, "selectLanguage": "Select Language", "showDetailsPanel": "Show Details Panel", + "source": { + "hide": "Hide Source", + "hotkey": "s", + "show": "Show Source" + }, "sourceAssetEvent_one": "Source Asset Event", "sourceAssetEvent_other": "Source Asset Events", "startDate": "Start Date", @@ -232,6 +238,11 @@ "lastHour": "Last Hour", "pastWeek": "Past Week" }, + "timestamp": { + "hide": "Hide Timestamps", + "hotkey": "t", + "show": "Show Timestamps" + }, "timezone": "Timezone", "timezoneModal": { "current-timezone": "Current time in", @@ -281,6 +292,7 @@ "tryNumber": "Try Number", "user": "User", "wrap": { + "hotkey": "w", "tooltip": "Press {{hotkey}} to toggle wrap", "unwrap": "Unwrap", "wrap": "Wrap" diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/dag.json index 92b8e59496cca..6654bf3b56158 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/en/dag.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/dag.json @@ -34,6 +34,7 @@ }, "info": "INFO", "noTryNumber": "No try number", + "settings": "Log Settings", "viewInExternal": "View logs in {{name}} (attempt {{attempt}})", "warning": "WARNING" }, diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/dags.json index c804a985fb570..d1ee6a3d0eeb7 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/en/dags.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/dags.json @@ -6,14 +6,21 @@ "warning": "This will remove all metadata related to the Dag, including Runs and Tasks." } }, + "favoriteDag": "Favorite Dag", "filters": { "allRunTypes": "All Run Types", "allStates": "All States", + "favorite": { + "all": "All", + "favorite": "Favorite", + "unfavorite": "Unfavorite" + }, "paused": { "active": "Active", "all": "All", "paused": "Paused" - } + }, + "runIdPatternFilter": "Search Dag Runs" }, "ownerLink": "Owner link for {{owner}}", "runAndTaskActions": { @@ -83,5 +90,6 @@ "desc": "Sort by Next Dag Run (Latest-Earliest)" }, "placeholder": "Sort by" - } + }, + "unfavoriteDag": "Unfavorite Dag" } diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/en/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/en/dashboard.json index 3b945abbb4552..666e28090f383 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/en/dashboard.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/en/dashboard.json @@ -1,4 +1,10 @@ { + "favorite": { + "favoriteDags_one": "First {{count}} favorite DAG", + "favoriteDags_other": "First {{count}} favorite DAGs", + "noDagRuns": "There is no DagRun for this dag yet.", + "noFavoriteDags": "No favorites yet. Click the star icon next to a DAG in the list to add it to your favorites." 
+ }, "group": "Group", "health": { "dagProcessor": "Dag Processor", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/admin.json new file mode 100644 index 0000000000000..10e1da9f850a7 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/admin.json @@ -0,0 +1,166 @@ +{ + "columns":{ + "description": "Descripción", + "key": "Clave", + "name": "Nombre", + "value": "Valor" + }, + "config":{ + "columns":{ + "section": "Sección" + }, + "title": "Configuración de Airflow" + }, + "connections":{ + "add": "Agregar Conexión", + "columns":{ + "connectionId": "ID de la Conexión", + "connectionType": "Tipo de Conexión", + "host": "Host", + "port": "Puerto" + }, + "connection_one": "Conexión", + "connection_other": "Conexiones", + "delete":{ + "deleteConnection_one": "Eliminar 1 conexión", + "deleteConnection_other": "Eliminar {{count}} conexiones", + "firstConfirmMessage_one": "Estás a punto de eliminar la siguiente conexión:", + "firstConfirmMessage_other": "Estás a punto de eliminar las siguientes conexiones:", + "title": "Eliminar Conexión" + }, + "edit": "Editar Conexión", + "form":{ + "connectionIdRequired": "El ID de la conexión es requerido", + "connectionIdRequirement": "El ID de la conexión no puede contener solo espacios", + "connectionTypeRequired": "El Tipo de Conexión es requerido", + "extraFields": "Campos Extra", + "extraFieldsJson": "Campos Extra (tipo JSON)", + "helperText": "¿Falta el Tipo de conexión? Asegúrate de haber instalado el paquete de proveedores de Airflow correspondiente.", + "selectConnectionType": "Seleccionar Tipo de Conexión", + "standardFields": "Campos Estándar" + }, + "nothingFound": { + "description": "Las conexiones definidas a través de variables de entorno o gestores de secretos no se muestran aquí.", + "documentationLink": "Aprende más en la documentación de Airflow.", + "learnMore": "Estas se resuelven en tiempo de ejecución y no se muestran en la interfaz de usuario.", + "title": "¡No se encontraron conexiones!" + }, + "searchPlaceholder": "Buscar Conexiones", + "test": "Prueba de Conexión", + "testDisabled": "La función de prueba de conexión está desactivada. Por favor, contacta a un administrador para activarla.", + "typeMeta": { + "error": "Error al recuperar la Metadata del Tipo de Conexión", + "standardFields": { + "description": "Descripción", + "host": "Host", + "login": "Login", + "password": "Contraseña", + "port": "Puerto", + "url_schema": "Esquema" + } + } + }, + "deleteActions":{ + "button": "Eliminar", + "modal":{ + "confirmButton": "Sí, Eliminar", + "secondConfirmMessage": "Esta acción es permanente y no se puede deshacer.", + "thirdConfirmMessage": "¿Confirmas que quieres proceder?" + }, + "selected": "Seleccionado", + "tooltip": "Eliminar conexiones seleccionadas" + }, + "formActions":{ + "reset": "Restablecer", + "save": "Guardar" + }, + "plugins": { + "columns": { + "source": "Origen" + }, + "importError_one": "Error de Importación de Plugin", + "importError_other": "Errores de Importación de Plugins", + "searchPlaceholder": "Buscar por archivo" + }, + "pools": { + "add": "Agregar Pool", + "deferredSlotsIncluded": "Slots Diferidos Incluidos", + "delete":{ + "title": "Eliminar Pool", + "warning": "Esto eliminará toda la metadata relacionada con el pool y puede afectar a las tareas que usan este pool."
+ }, + "edit": "Editar Pool", + "form": { + "checkbox": "Marcar para incluir tareas diferidas cuando se calculan los slots abiertos del pool", + "description": "Descripción", + "includeDeferred": "Incluir diferidos", + "nameMaxLength": "El nombre puede contener un máximo de 256 caracteres", + "nameRequired": "El nombre es requerido", + "slots": "Slots" + }, + "noPoolsFound": "No se encontraron pools", + "pool_one": "Pool", + "pool_other": "Pools", + "searchPlaceholder": "Buscar Pools", + "sort": { + "asc": "Nombre (A-Z)", + "desc": "Nombre (Z-A)", + "placeholder": "Ordenar por" + } + }, + "providers": { + "columns": { + "packageName": "Nombre del Paquete", + "version": "Versión" + } + }, + "variables": { + "add": "Agregar Variable", + "columns": { + "isEncrypted": "Está encriptada" + }, + "delete": { + "deleteVariable_one": "Eliminar 1 Variable", + "deleteVariable_other": "Eliminar {{count}} Variables", + "firstConfirmMessage_one": "Estás a punto de eliminar la siguiente variable:", + "firstConfirmMessage_other": "Estás a punto de eliminar las siguientes variables:", + "title": "Eliminar Variable", + "tooltip": "Eliminar variables seleccionadas" + }, + "edit": "Editar Variable", + "export": "Exportar", + "exportTooltip": "Exportar variables seleccionadas", + "form": { + "invalidJson": "JSON inválido", + "keyMaxLength": "La clave puede contener un máximo de 250 caracteres", + "keyRequired": "La clave es requerida", + "valueRequired": "El valor es requerido" + }, + "import": { + "button": "Importar", + "conflictResolution": "Seleccionar Resolución de Conflicto de Variables", + "errorParsingJsonFile": "Error al analizar el archivo JSON: Cargar un archivo JSON que contenga variables (e.g., {\"key\": \"value\", ...}).", + "options": { + "fail": { + "description": "Falla la importación si se detectan variables existentes.", + "title": "Fallar" + }, + "overwrite": { + "description": "Sobrescribe la variable en caso de conflicto.", + "title": "Sobrescribir" + }, + "skip": { + "description": "Omite la importación de variables que ya existen.", + "title": "Omitir" + } + }, + "title": "Importar Variables", + "upload": "Cargar un Archivo JSON", + "uploadPlaceholder": "Cargar un archivo JSON que contenga variables (e.g., {\"key\": \"value\", ...})" + }, + "noRowsMessage": "No se encontraron variables", + "searchPlaceholder": "Buscar Claves", + "variable_one": "Variable", + "variable_other": "Variables" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/assets.json new file mode 100644 index 0000000000000..e321d17a12740 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/assets.json @@ -0,0 +1,29 @@ +{ + "consumingDags": "DAGs Consumidores", + "createEvent": { + "button": "Crear Evento", + "manual": { + "description": "Crear un Evento de Asset manualmente", + "extra": "Evento de Asset Extra", + "label": "Manual" + }, + "materialize": { + "description": "Activar el DAG upstream de este asset", + "descriptionWithDag": "Activar el DAG upstream de este asset: {{dagName}}", + "label": "Materializar", + "unpauseDag": "Despausar {{dagName}} al activar" + }, + "success": { + "manualDescription": "La creación de eventos de asset manual fue exitosa.", + "manualTitle": "Evento de Asset Creado", + "materializeDescription": "El DAG upstream {{dagId}} fue activado exitosamente.", + "materializeTitle": "Materializando Asset" + }, + "title": "Crear Evento de Asset para {{name}}" + }, + "group": "Grupo",
"lastAssetEvent": "Último Evento de Asset", + "name": "Nombre", + "producingTasks": "Tareas produciendo", + "searchPlaceholder": "Buscar Assets" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/browse.json new file mode 100644 index 0000000000000..7c747889f48df --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/browse.json @@ -0,0 +1,23 @@ +{ + "auditLog":{ + "actions": { + "collapseAllExtra": "Colapsar todos los extra json", + "expandAllExtra": "Expandir todos los extra json" + }, + "columns":{ + "event": "Evento", + "extra": "Extra", + "user": "Usuario", + "when": "Cuando" + }, + "title": "Auditar Log" + }, + "xcom":{ + "columns":{ + "dag": "DAG", + "key": "Clave", + "value": "Valor" + }, + "title": "XCom" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/common.json new file mode 100644 index 0000000000000..9a00a8cb2a9f1 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/common.json @@ -0,0 +1,280 @@ +{ + "admin": { + "Config": "Configuración", + "Connections": "Conexiones", + "Plugins": "Plugins", + "Pools": "Pools", + "Providers": "Proveedores", + "Variables": "Variables" + }, + "asset_one": "Asset", + "asset_other": "Assets", + "assetEvent_one": "Evento de Asset", + "assetEvent_other": "Eventos de Asset", + "backfill_one": "Backfill", + "backfill_other": "Backfills", + "browse": { + "auditLog": "Auditar Log", + "xcoms": "XComs" + }, + "collapseDetailsPanel": "Colapsar Detalles del Panel", + "createdAssetEvent_one": "Evento de Asset Creado", + "createdAssetEvent_other": "Eventos de Asset Creados", + "dag_one": "DAG", + "dag_other": "DAGs", + "dagDetails": { + "catchup": "Catchup", + "concurrency": "Concurrencia", + "dagRunTimeout": "Tiempo de Ejecución del DAG", + "defaultArgs": "Argumentos por Defecto", + "description": "Descripción", + "documentation": "Documentación del DAG", + "fileLocation": "Ubicación del Archivo", + "hasTaskConcurrencyLimits": "Tiene límites de concurrencia de áreas", + "lastExpired": "Último Expirado", + "lastParsed": "Último Parseado", + "latestDagVersion": "Última Versión del DAG", + "latestRun": "Última Ejecución", + "maxActiveRuns": "Máximo de Ejecuciones Activas", + "maxActiveTasks": "Máximo de Tareas Activas", + "maxConsecutiveFailedDagRuns": "Máximo de Ejecuciones Fallidas Consecutivas del DAG", + "nextRun": "Siguiente Ejecución", + "owner": "Propietario", + "params": "Parámetros", + "schedule": "Programación", + "tags": "Etiquetas" + }, + "dagId": "ID del DAG", + "dagRun": { + "conf": "Conf", + "dagVersions": "Versión(es) del DAG", + "dataIntervalEnd": "Intervalo de Datos Final", + "dataIntervalStart": "Intervalo de Datos Inicial", + "lastSchedulingDecision": "Última Decisión de Programación", + "queuedAt": "En Cola en", + "runAfter": "Ejecutar Después", + "runType": "Tipo de Ejecución", + "sourceAssetEvent": "Evento de Asset Fuente", + "triggeredBy": "Activado por" + }, + "dagRun_one": "Ejecución del DAG", + "dagRun_other": "Ejecuciones del DAG", + "dagWarnings": "Advertencias/Errores del DAG", + "defaultToGraphView": "Por defecto a vista gráfica", + "defaultToGridView": "Por defecto a vista en cuadrícula", + "direction": "Dirección", + "docs": { + "documentation": "Documentación", + "githubRepo": "Repositorio de GitHub", + "restApiReference": "Referencia de REST API" + }, + "duration": "Duración", + "endDate": "Fecha Final", + "error": { + 
"back": "Atrás", + "defaultMessage": "Ocurrió un error inesperado", + "home": "Inicio", + "notFound": "Página no encontrada", + "title": "Error" + }, + "expression": { + "all": "Todos", + "and": "Y", + "any": "Cualquiera", + "or": "O" + }, + "logicalDate": "Fecha Lógica", + "logout": "Cerrar Sesión", + "logoutConfirmation": "Estás a punto de cerrar sesión de la aplicación.", + "mapIndex": "Mapa de Índice", + "modal": { + "cancel": "Cancelar", + "confirm": "Confirmar", + "delete": { + "button": "Eliminar", + "confirmation": "¿Confirmas de querer eliminar {{resourceName}}? Esta acción no se puede deshacer." + } + }, + "nav": { + "admin": "Administración", + "assets": "Assets", + "browse": "Navegar", + "dags": "DAGs", + "docs": "Docs", + "home": "Inicio", + "plugins": "Plugins", + "security": "Seguridad" + }, + "noItemsFound": "No se encontraron {{modelName}}s", + "note": { + "add": "Agregar una nota", + "dagRun": "Nota de Ejecución del DAG", + "label": "Nota", + "placeholder": "Agregar una nota...", + "taskInstance": "Nota de Instancia de Tarea" + }, + "pools": { + "deferred": "Diferido", + "open": "Abierto", + "pools_one": "pool", + "pools_other": "pools", + "queued": "En Cola", + "running": "En Ejecución", + "scheduled": "Programado" + }, + "runId": "ID de la corrida", + "runTypes": { + "asset_triggered": "Asset Activado", + "backfill": "Backfill", + "manual": "Manual", + "scheduled": "Programado" + }, + "scroll": { + "direction": { + "bottom": "abajo", + "top": "arriba" + }, + "tooltip": "Presiona {{hotkey}} para desplazarte a {{direction}}" + }, + "seconds": "{{count}}s", + "security": { + "actions": "Acciones", + "permissions": "Permisos", + "resources": "Recursos", + "roles": "Roles", + "users": "Usuarios" + }, + "selectLanguage": "Seleccionar Idioma", + "showDetailsPanel": "Mostrar Panel de Detalles", + "sourceAssetEvent_one": "Evento de Asset Fuente", + "sourceAssetEvent_other": "Eventos de Asset Fuente", + "startDate": "Fecha Inicial", + "state": "Estado", + "states": { + "deferred": "Diferido", + "failed": "Fallido", + "no_status": "Sin Estado", + "none": "Sin Estado", + "queued": "En Cola", + "removed": "Removido", + "restarting": "Reiniciando", + "running": "En Ejecución", + "scheduled": "Programado", + "skipped": "Omitido", + "success": "Exitoso", + "up_for_reschedule": "Por Reprogramar", + "up_for_retry": "Por Reintentar", + "upstream_failed": "Fallido en Upstream" + }, + "switchToDarkMode": "Cambiar a Modo Oscuro", + "switchToLightMode": "Cambiar a Modo Claro", + "table": { + "completedAt": "Completado en", + "createdAt": "Creado en", + "filterByTag": "Filtrar DAGs por etiqueta", + "filterColumns": "Filtrar columnas de la tabla", + "filterReset_one": "Restablecer filtro", + "filterReset_other": "Restablecer filtros", + "from": "Desde", + "maxActiveRuns": "Máximo de Ejecuciones Activas", + "noTagsFound": "No se encontraron etiquetas", + "tagMode": { + "all": "Todos", + "any": "Cualquiera" + }, + "tagPlaceholder": "Filtrar por etiqueta", + "to": "Hasta" + }, + "task": { + "documentation": "Documentación de la Tarea", + "lastInstance": "Última Instancia", + "operator": "Operador", + "triggerRule": "Regla de Activación" + }, + "task_one": "Tarea", + "task_other": "Tareas", + "taskId": "ID de la Tarea", + "taskInstance": { + "dagVersion": "Versión del DAG", + "executor": "Executor", + "executorConfig": "Configuración del Executor", + "hostname": "Nombre de Host", + "maxTries": "Máximo de Intentos", + "pid": "PID", + "pool": "Pool", + "poolSlots": "Slots del Pool", + 
"priorityWeight": "Peso de Prioridad", + "queue": "Cola", + "queuedWhen": "En Cola en", + "scheduledWhen": "Programado en", + "triggerer": { + "assigned": "Triggerer Asignado", + "class": "Clase del Trigger", + "createdAt": "Tiempo de Creación del Trigger", + "id": "ID del Trigger", + "latestHeartbeat": "Último Heartbeat del Triggerer", + "title": "Información del Triggerer" + }, + "unixname": "Nombre de Unix" + }, + "taskInstance_one": "Instancia de Tarea", + "taskInstance_other": "Instancias de Tarea", + "timeRange": { + "last12Hours": "Últimas 12 Horas", + "last24Hours": "Últimas 24 Horas", + "lastHour": "Última Hora", + "pastWeek": "Semana Pasada" + }, + "timezone": "Zona Horaria", + "timezoneModal": { + "current-timezone": "Hora actual en", + "placeholder": "Seleccionar una zona horaria", + "title": "Seleccionar Zona Horaria", + "utc": "UTC (Tiempo Universal Coordinado)" + }, + "toaster": { + "bulkDelete": { + "error": "Eliminar {{resourceName}} Request Fallido", + "success": { + "description": "{{count}} {{resourceName}} han sido eliminados exitosamente. Claves: {{keys}}", + "title": "Eliminar {{resourceName}} Request Enviado" + } + }, + "create": { + "error": "Crear {{resourceName}} Request Fallido", + "success": { + "description": "{{resourceName}} ha sido creado exitosamente.", + "title": "Crear {{resourceName}} Request Enviado" + } + }, + "delete": { + "error": "Eliminar {{resourceName}} Request Fallido", + "success": { + "description": "{{resourceName}} ha sido eliminado exitosamente.", + "title": "Eliminar {{resourceName}} Request Enviado" + } + }, + "import": { + "error": "Importar {{resourceName}} Request Fallido", + "success": { + "description": "{{count}} {{resourceName}} han sido importados exitosamente.", + "title": "Importar {{resourceName}} Request Enviado" + } + }, + "update": { + "error": "Actualizar {{resourceName}} Request Fallido", + "success": { + "description": "{{resourceName}} ha sido actualizado exitosamente.", + "title": "Actualizar {{resourceName}} Request Enviado" + } + } + }, + "triggered": "Activado", + "tryNumber": "Intento Número", + "user": "Usuario", + "wrap": { + "tooltip": "Presiona {{hotkey}} para alternar el 'envolver'", + "unwrap": "Desenvolver", + "wrap": "Envolver" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/components.json new file mode 100644 index 0000000000000..5be54f7d1c4bb --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/components.json @@ -0,0 +1,131 @@ +{ + "backfill": { + "affected_one": "1 ejecución será activada.", + "affected_other": "{{count}} ejecuciones serán activadas.", + "affectedNone": "No hay ejecuciones que coincidan con los criterios seleccionados.", + "backwards": "Ejecutar Hacia Atrás", + "dateRange": "Rango de Fechas", + "dateRangeFrom": "Desde", + "dateRangeTo": "Hasta", + "errorStartDateBeforeEndDate": "La Fecha Inicial debe ser antes de la Fecha Final", + "maxRuns": "Máximo de Ejecuciones Activas", + "reprocessBehavior": "Comportamiento de Reprocesamiento", + "run": "Ejecutar Backfill", + "selectDescription": "Ejecutar este DAG para un rango de fechas", + "selectLabel": "Backfill", + "title": "Ejecutar Backfill", + "toaster": { + "success": { + "description": "Backfill jobs han sido activados exitosamente.", + "title": "Backfill generado" + } + }, + "tooltip": "Backfill requiere una programación", + "unpause": "Reanudar {{dag_display_name}} al activarse", + "validation": { + "datesRequired": "Ambos 
intervalos de Fecha Inicial y Fecha Final deben ser proporcionados.", + "startBeforeEnd": "El intervalo de Fecha Inicial debe ser menor o igual a la Fecha Final." + } + }, + "banner": { + "backfillInProgress": "Backfill en progreso", + "cancel": "Cancelar backfill", + "pause": "Pausar backfill", + "unpause": "Reanudar backfill" + }, + "clipboard": { + "copy": "Copiar" + }, + "close": "Cerrar", + "configForm": { + "advancedOptions": "Opciones Avanzadas", + "configJson": "Configuración JSON", + "invalidJson": "Formato JSON inválido: {{errorMessage}}" + }, + "dagWarnings": { + "error_one": "1 Error", + "errorAndWarning": "1 Error y {{warningText}}", + "warning_one": "1 Aviso", + "warning_other": "{{count}} Advertencias" + }, + "durationChart": { + "duration": "Duración (segundos)", + "lastDagRun_one": "Última Ejecución de DAG", + "lastDagRun_other": "Últimas {{count}} Ejecuciones de DAG", + "lastTaskInstance_one": "Última Instancia de Tarea", + "lastTaskInstance_other": "Últimas {{count}} Instancias de Tarea", + "queuedDuration": "Duración en Cola", + "runAfter": "Ejecutar Después", + "runDuration": "Duración de la Ejecución" + }, + "fileUpload": { + "files_other": "{{count}} archivos" + }, + "flexibleForm": { + "placeholder": "Seleccionar Valor", + "placeholderArray": "Ingrese cada cadena en una nueva línea", + "placeholderExamples": "Comience a escribir para ver opciones", + "placeholderMulti": "Seleccionar uno o múltiples valores", + "validationErrorArrayNotArray": "El valor debe ser un array.", + "validationErrorArrayNotNumbers": "Todos los elementos en el array deben ser números.", + "validationErrorArrayNotObject": "Todos los elementos en el array deben ser objetos.", + "validationErrorRequired": "Este campo es requerido" + }, + "graph": { + "directionDown": "De arriba a abajo", + "directionLeft": "De derecha a izquierda", + "directionRight": "De izquierda a derecha", + "directionUp": "De abajo a arriba", + "downloadImage": "Descargar imagen", + "downloadImageError": "Error al descargar la imagen.", + "downloadImageErrorTitle": "Descarga Fallida", + "otherDagRuns": "+Otras Ejecuciones de DAG", + "taskCount_one": "{{count}} Tarea", + "taskCount_other": "{{count}} Tareas", + "taskGroup": "Grupo de Tareas" + }, + "limitedList": "+{{count}} más", + "logs": { + "file": "Archivo", + "location": "línea {{line}} en {{name}}" + }, + "reparseDag": "Reparar DAG", + "sortedAscending": "ordenado ascendente", + "sortedDescending": "ordenado descendente", + "sortedUnsorted": "sin ordenar", + "taskTries": "Intentos de Tarea", + "toggleCardView": "Mostrar vista de tarjeta", + "toggleTableView": "Mostrar vista de tabla", + "triggerDag": { + "button": "Trigger", + "loading": "Cargando información del DAG...", + "loadingFailed": "Error al cargar la información del DAG. Por favor, inténtelo de nuevo.", + "runIdHelp": "Opcional - se generará si no se proporciona", + "selectDescription": "Activar una ejecución única de este DAG", + "selectLabel": "Ejecución Única", + "title": "Activar DAG", + "toaster": { + "success": { + "description": "La ejecución del DAG ha sido activada exitosamente.", + "title": "Ejecución del DAG Activada" + } + }, + "unpause": "Reanudar {{dagDisplayName}} al activarse" + }, + "trimText": { + "details": "Detalles", + "empty": "Vacío", + "noContent": "No hay contenido disponible." 
+ }, + "versionDetails": { + "bundleLink": "Enlace del Bundle", + "bundleName": "Nombre del Bundle", + "bundleVersion": "Versión del Bundle", + "createdAt": "Creado en", + "versionId": "ID de la Versión" + }, + "versionSelect": { + "dagVersion": "Versión del DAG", + "versionCode": "v{{versionCode}}" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/dag.json new file mode 100644 index 0000000000000..4d96f2abfddbb --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/dag.json @@ -0,0 +1,110 @@ +{ + "allRuns": "Todas las Ejecuciones", + "blockingDeps": { + "dependency": "Dependencia", + "reason": "Razón", + "title": "Dependencias que impiden que la tarea sea programada" + }, + "code": { + "bundleUrl": "URL del Bundle", + "noCode": "No se encontró código", + "parsedAt": "Parseado en:" + }, + "extraLinks": "Enlaces Extra", + "grid": { + "buttons": { + "resetToLatest": "Reiniciar a la última", + "toggleGroup": "Alternar grupo" + } + }, + "header": { + "buttons": { + "dagDocs": "Documentación del DAG" + } + }, + "logs": { + "noTryNumber": "No hay número de intento", + "viewInExternal": "Ver logs en {{name}} (intento {{attempt}})" + }, + "overview": { + "buttons": { + "failedRun_one": "Ejecución Fallida", + "failedRun_other": "Ejecuciones Fallidas", + "failedTask_one": "Tarea Fallida", + "failedTask_other": "Tareas Fallidas", + "failedTaskInstance_one": "Instancia de Tarea Fallida", + "failedTaskInstance_other": "Instancias de Tarea Fallidas" + }, + "charts": { + "assetEvent_one": "Evento de Asset Creado", + "assetEvent_other": "Eventos de Asset Creados" + }, + "failedLogs": { + "title": "Logs de Tareas Fallidas Recientes", + "viewFullLogs": "Ver logs completos" + } + }, + "panel": { + "buttons": { + "options": "Opciones", + "showGraph": "Mostrar Gráfico", + "showGrid": "Mostrar Grilla" + }, + "dagRuns": { + "label": "Número de Ejecuciones de DAG" + }, + "dependencies": { + "label": "Dependencias", + "options": { + "allDagDependencies": "Todas las Dependencias de DAG", + "externalConditions": "Condiciones Externas", + "onlyTasks": "Solo tareas" + }, + "placeholder": "Dependencias" + }, + "graphDirection": { + "label": "Dirección del Gráfico" + } + }, + "paramsFailed": "Error al cargar los parámetros", + "parse": { + "toaster": { + "error": { + "description": "El DAG no pudo ser reparseado. Puede haber solicitudes de reparseo pendientes de procesar.", + "title": "El DAG no pudo ser reparseado" + }, + "success": { + "description": "El DAG debería reparsearse pronto.", + "title": "Solicitud de reparseo enviada exitosamente" + } + } + }, + "tabs": { + "assetEvents": "Eventos de Asset", + "auditLog": "Registro de Auditoría", + "backfills": "Backfills", + "code": "Código", + "details": "Detalles", + "logs": "Logs", + "mappedTaskInstances_one": "Instancia de Tarea [{{count}}]", + "mappedTaskInstances_other": "Instancias de Tarea [{{count}}]", + "overview": "Resumen", + "renderedTemplates": "Plantillas Renderizadas", + "runs": "Ejecuciones", + "taskInstances": "Instancias de Tarea", + "tasks": "Tareas", + "xcom": "XCom" + }, + "taskGroups": { + "collapseAll": "Colapsar todos los grupos de tareas", + "expandAll": "Expandir todos los grupos de tareas" + }, + "taskLogs": { + "allLogLevels": "Todos los Niveles de Log", + "allSources": "Todos los Orígenes", + "fullscreen": { + "button": "Pantalla completa", + "tooltip": "Presiona {{hotkey}} para pantalla completa" + } + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/dags.json new file mode 100644 index 0000000000000..491e23dfc4bc9 --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/dags.json @@ -0,0 +1,87 @@ +{ + "assetSchedule": "{{count}} de {{total}} assets actualizados", + "dagActions": { + "delete": { + "button": "Eliminar DAG", + "warning": "Esto eliminará toda la metadata relacionada con el DAG, incluyendo Ejecuciones y Tareas." + } + }, + "filters": { + "allRunTypes": "Todos los Tipos de Ejecución", + "allStates": "Todos los Estados", + "paused": { + "active": "Activo", + "all": "Todos", + "paused": "Pausado" + } + }, + "ownerLink": "Enlace de Propietario para {{owner}}", + "runAndTaskActions": { + "affectedTasks": { + "noItemsFound": "No se encontraron tareas.", + "title": "Tareas Afectadas: {{count}}" + }, + "clear": { + "button": "Limpiar {{type}}", + "buttonTooltip": "Presiona shift+c para limpiar", + "error": "Error al limpiar {{type}}", + "title": "Limpiar {{type}}" + }, + "delete": { + "button": "Eliminar {{type}}", + "dialog": { + "resourceName": "{{type}} {{id}}", + "title": "Eliminar {{type}}", + "warning": "Esto eliminará toda la metadata relacionada con el {{type}}."
+ }, + "error": "Error al eliminar {{type}}", + "success": { + "description": "La solicitud de eliminación de {{type}} fue exitosa.", + "title": "{{type}} Eliminado Exitosamente" + } + }, + "markAs": { + "button": "Marcar {{type}} como...", + "buttonTooltip": { + "failed": "Presiona shift+f para marcar como fallido", + "success": "Presiona shift+s para marcar como exitoso" + }, + "title": "Marcar {{type}} como {{state}}" + }, + "options": { + "downstream": "Downstream", + "existingTasks": "Limpiar tareas existentes", + "future": "Futuro", + "onlyFailed": "Limpiar solo tareas fallidas", + "past": "Pasado", + "queueNew": "Poner en cola nuevas tareas", + "upstream": "Upstream" + } + }, + "search": { + "advanced": "Búsqueda Avanzada", + "clear": "Limpiar búsqueda", + "dags": "Buscar DAGs", + "hotkey": "+K", + "tasks": "Buscar Tareas" + }, + "sort": { + "displayName": { + "asc": "Ordenar por Nombre (A-Z)", + "desc": "Ordenar por Nombre (Z-A)" + }, + "lastRunStartDate": { + "asc": "Ordenar por Fecha Inicial de Ejecución (Más Antiguo-Más Reciente)", + "desc": "Ordenar por Fecha Inicial de Ejecución (Más Reciente-Más Antiguo)" + }, + "lastRunState": { + "asc": "Ordenar por Estado de Ejecución (A-Z)", + "desc": "Ordenar por Estado de Ejecución (Z-A)" + }, + "nextDagRun": { + "asc": "Ordenar por Ejecución de DAG (Más Antiguo-Más Reciente)", + "desc": "Ordenar por Ejecución de DAG (Más Reciente-Más Antiguo)" + }, + "placeholder": "Ordenar por" + } +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/es/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/es/dashboard.json new file mode 100644 index 0000000000000..bc9ed1194d54f --- /dev/null +++ b/airflow-core/src/airflow/ui/public/i18n/locales/es/dashboard.json @@ -0,0 +1,38 @@ +{ + "group": "Grupo", + "health": { + "dagProcessor": "Procesador de DAGs", + "health": "Salud", + "healthy": "Saludable", + "lastHeartbeat": "Último Heartbeat", + "metaDatabase": "Base de Datos de la Metadata", + "scheduler": "Programador", + "status": "Estado", + "triggerer": "Triggerer", + "unhealthy": "No Saludable" + }, + "history": "Historial", + "importErrors": { + "dagImportError_one": "Error de Importación de DAG", + "dagImportError_other": "Errores de Importación de DAGs", + "searchByFile": "Buscar por archivo", + "timestamp": "Marca de Tiempo" + }, + "managePools": "Gestionar Pools", + "noAssetEvents": "No se encontraron Eventos de Asset.", + "poolSlots": "Slots del Pool", + "sortBy": { + "newestFirst": "Más Recientes", + "oldestFirst": "Más Antiguos" + }, + "source": "Origen", + "stats": { + "activeDags": "DAGs Activos", + "failedDags": "DAGs Fallidos", + "queuedDags": "DAGs en Cola", + "runningDags": "DAGs en Ejecución", + "stats": "Estadísticas" + }, + "uri": "URI", + "welcome": "Te damos la bienvenida" +} diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/admin.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/admin.json index adadffd1961e3..be990027c1e4a 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/he/admin.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/admin.json @@ -40,7 +40,10 @@ "standardFields": "שדות סטנדרטיים" }, "nothingFound": { - "title": "לא נמצאו חיבורים" + "description": "חיבורים המוגדרים באמצעות משתני סביבה או מנהלי סודות אינם מוצגים כאן.", + "documentationLink": "למד עוד בתיעוד של Airflow.", + "learnMore": "אלה נפתרים בזמן ריצה ואינם גלויים בממשק המשתמש.", + "title": "לא נמצאו חיבורים" }, "searchPlaceholder": "חפש חיבורים", "test": "בדוק חיבור", @@ -57,9 +60,9 @@ } } }, -
"deleteActions":{ + "deleteActions": { "button": "מחק", - "modal":{ + "modal": { "confirmButton": "אשר מחיקה", "secondConfirmMessage": "פעולה זו איננה הפיכה", "thirdConfirmMessage": " האם אתה בטוח שברצונך להמשיך?" @@ -67,7 +70,7 @@ "selected": "נבחר", "tooltip": "מחק חיבורים נבחרים" }, - "formActions":{ + "formActions": { "reset": "אתחל", "save": "שמור" }, @@ -82,7 +85,7 @@ "pools": { "add": "הוסף מאגר משאבים", "deferredSlotsIncluded": "כולל מקומות שמורים למשימות מושהות", - "delete":{ + "delete": { "title": "מחק מאגר משאבים", "warning": "זה יימחק את כל המטא-נתונים המקושרים עם המאגר ועלולים להשפיע על משימות המקושרות עם המאגר" }, @@ -124,14 +127,14 @@ "title": "מחק מאגר משאבים", "tooltip": "מחר משתנים נבחרים" }, - "edit": "Edit Variable", - "export": "Export", - "exportTooltip": "Export selected variables", + "edit": "ערוך משתנה", + "export": "ייצא", + "exportTooltip": "ייצא משתנים נבחרים", "form": { - "invalidJson": "Invalid JSON", - "keyMaxLength": "Key can contain a maximum of 250 characters", - "keyRequired": "Key is required", - "valueRequired": "Value is required" + "invalidJson": "JSON לא תקין", + "keyMaxLength": "מפתח יכול להכיל מקסימום 250 תווים", + "keyRequired": "נדרש מפתח", + "valueRequired": "נדרש ערך" }, "import": { "button": "ייבא", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/assets.json index aad80cc4a4c0f..64c57f2029ce6 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/he/assets.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/assets.json @@ -25,5 +25,6 @@ "lastAssetEvent": "אירוע אחרון בנכס", "name": "שם", "producingTasks": "מייצר משימות", + "scheduledDags": "Dags מתוזמנים", "searchPlaceholder": "חיפוש נכסים" } diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/common.json index d37c035fdbf35..c9f166db6d951 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/he/common.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/common.json @@ -17,6 +17,7 @@ "auditLog": "יומן מערכת", "xcoms": "XComs" }, + "collapseDetailsPanel": "כווץ לוח פרטים", "createdAssetEvent_one": "אירוע נכס שנוצר", "createdAssetEvent_other": "אירועי נכס שנוצרו", "dag_one": "Dag", @@ -54,7 +55,8 @@ "runAfter": "זמן ההרצה מוקדם ביותר", "runType": "סוג הרצה", "sourceAssetEvent": "אירוע נכס מקור", - "triggeredBy": "הופעל על-ידי" + "triggeredBy": "הופעל על-ידי", + "triggeringUser": "שם משתמש מפעיל" }, "dagRun_one": "הרצת Dag", "dagRun_other": "הרצת Dag", @@ -76,6 +78,12 @@ "notFound": "הדף לא נמצא", "title": "שגיאה" }, + "expand": { + "collapse": "כווץ", + "expand": "הרחב", + "hotkey": "e", + "tooltip": "לחץ {{hotkey}} לכיווץ או הרחבה" + }, "expression": { "all": "הכל", "and": "וגם", @@ -101,6 +109,7 @@ "dags": "Dags", "docs": "תיעוד", "home": "דף הבית", + "legacyFabViews": "תצוגות מורשת", "plugins": "תוספים", "security": "אבטחה" }, @@ -128,6 +137,13 @@ "manual": "הפעלה ידנית", "scheduled": "מתוזמן" }, + "scroll": { + "direction": { + "bottom": "תחתית", + "top": "עליון" + }, + "tooltip": "לחץ {{hotkey}} לגלילה ל{{direction}}" + }, "seconds": "{{count}} שניות", "security": { "actions": "פעולות", @@ -137,6 +153,12 @@ "users": "משתמשים" }, "selectLanguage": "בחר שפה", + "showDetailsPanel": "הצג לוח פרטים", + "source": { + "hide": "הסתר מקור", + "hotkey": "s", + "show": "הצג מקור" + }, "sourceAssetEvent_one": "אירוע נכס מקור", "sourceAssetEvent_other": "אירועי נכס מקור", "startDate": "תאריך התחלה", @@ -216,6 +238,11 
@@ "lastHour": "השעה האחרונה", "pastWeek": "השבוע האחרון" }, + "timestamp": { + "hide": "הסתר חותמות זמן", + "hotkey": "t", + "show": "הצג חותמות זמן" + }, "timezone": "אזור זמן", "timezoneModal": { "current-timezone": "אזור זמן נוכחי", @@ -260,10 +287,12 @@ } } }, + "total": "סה״כ {{state}}", "triggered": "הופעל", "tryNumber": "מספר ניסיון", "user": "משתמש", "wrap": { + "hotkey": "w", "tooltip": "לחץ {{hotkey}} להפעלת/כיבוי גלישת טקסט", "unwrap": "בטל גלישת טקסט", "wrap": "הפעל גלישת טקסט" diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json index ff272115ba3c8..530619fa9af43 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/components.json @@ -3,12 +3,15 @@ "affected_one": "ריצה אחת תופעל.", "affected_other": "{{count}} ריצות תופעלנה.", "affectedNone": "לא נמצאו ריצות התואמות לקריטריונים שנבחרו.", + "allRuns": "כל ההרצות", "backwards": "הרץ לאחור", "dateRange": "טווח תאריכים", "dateRangeFrom": "מתאריך", "dateRangeTo": "עד תאריך", "errorStartDateBeforeEndDate": "תאריך ההתחלה חייב להיות לפני תאריך הסיום", "maxRuns": "מספר ריצות מקבילות מירבי", + "missingAndErroredRuns": "הרצות חסרות ושגויות", + "missingRuns": "הרצות חסרות", "reprocessBehavior": "התנהגות עיבוד מחדש", "run": "הרץ Backfill", "selectDescription": "הרץ DAG זה עבור טווח תאריכים", @@ -44,6 +47,7 @@ }, "dagWarnings": { "error_one": "שגיאה אחת", + "error_other": "שגיאה אחת", "errorAndWarning": "שגיאה אחת ו-{{warningText}}", "warning_one": "אזהרה אחת", "warning_other": "{{count}} אזהרות" @@ -59,6 +63,7 @@ "runDuration": "משך זמן ריצה" }, "fileUpload": { + "files_one": "{{count}} קבצים", "files_other": "{{count}} קבצים" }, "flexibleForm": { diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/dag.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/dag.json index a05313b135235..6a0f65c200f47 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/he/dag.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/dag.json @@ -25,12 +25,18 @@ "logs": { "allLevels": "כל רמות הרישום", "allSources": "כל המקורות", + "critical": "CRITICAL", + "debug": "DEBUG", + "error": "ERROR", "fullscreen": { "button": "מסך מלא", "tooltip": "לחץ {{hotkey}} למסך מלא" }, + "info": "INFO", "noTryNumber": "אין מספר ניסיון", - "viewInExternal": "צפה ברישומים ב-{{name}} (ניסיון {{attempt}})" + "settings": "הגדרות רישום", + "viewInExternal": "צפה ברישומים ב-{{name}} (ניסיון {{attempt}})", + "warning": "WARNING" }, "overview": { "buttons": { diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/he/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/dags.json index 1978cbefca115..6d62ec3190b62 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/he/dags.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/dags.json @@ -6,14 +6,21 @@ "warning": "זה ימחק את כל המטא-נתונים הקשורים ל-Dag, כולל ריצות ומשימות" } }, + "favoriteDag": "הוסף Dag למועדפים", "filters": { "allRunTypes": "כל סוגי הריצות", "allStates": "כל המצבים", + "favorite": { + "all": "הכל", + "favorite": "מועדף", + "unfavorite": "לא מועדף" + }, "paused": { "active": "פעיל", "all": "הכל", "paused": "מושהה" - } + }, + "runIdPatternFilter": "חפש הרצת Dag" }, "ownerLink": "קישור בעלים ל-{{owner}}", "runAndTaskActions": { @@ -83,5 +90,6 @@ "desc": "מיין לפי ריצת ה-Dag הבאה (מהמאוחר למוקדם)" }, "placeholder": "מיין לפי" - } + }, + "unfavoriteDag": "הסר Dag ממועדפים" } diff 
--git a/airflow-core/src/airflow/ui/public/i18n/locales/he/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/he/dashboard.json index 1ec8153551a06..818a590cce899 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/he/dashboard.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/he/dashboard.json @@ -1,38 +1,44 @@ { - "group": "קבוצה", - "health": { - "dagProcessor": "Dag מנתח", - "health": "תקינות", - "healthy": "תקין", - "lastHeartbeat": "פעימה אחרונה", - "metaDatabase": "מטא מסד-נתונים", - "scheduler": "מתזמן", - "status": "סטאטוס", - "triggerer": "מנהל תהליכים בהמתנה", - "unhealthy": "לא תקין" - }, - "history": "היסטוריה", - "importErrors": { - "dagImportError_one": "שגיאות בטעינת Dags", - "dagImportError_other": "שגיאות בטעינת Dags", - "searchByFile": "חפש לפי קובץ", - "timestamp": "חותמת זמן" - }, - "managePools": "ניהול מאגרים", - "noAssetEvents": "אין אירועים בנכסים", - "poolSlots": "סטאטוס מאגרים", - "sortBy": { - "newestFirst": "חדש קודם", - "oldestFirst": "ישן קודם" - }, - "source": "מקור", - "stats": { - "activeDags": "Dags פעיל", - "failedDags": "Dags בכשלון", - "queuedDags": "Dags בתור", - "runningDags": "Dags בריצה", - "stats": "סטטיסטיקות" - }, - "uri": "URI", - "welcome": "ברוכים הבאים" + "favorite": { + "favoriteDags_one": "{{count}} Dags מועדפים ראשונים", + "favoriteDags_other": "{{count}} Dags מועדפים ראשונים", + "noDagRuns": "אין עדיין הרצת Dag עבור Dag זה.", + "noFavoriteDags": "אין עדיין מועדפים. לחץ על סמל הכוכב ליד Dag ברשימה כדי להוסיף אותו למועדפים." + }, + "group": "קבוצה", + "health": { + "dagProcessor": "Dag מנתח", + "health": "תקינות", + "healthy": "תקין", + "lastHeartbeat": "פעימה אחרונה", + "metaDatabase": "מטא מסד-נתונים", + "scheduler": "מתזמן", + "status": "סטאטוס", + "triggerer": "מנהל תהליכים בהמתנה", + "unhealthy": "לא תקין" + }, + "history": "היסטוריה", + "importErrors": { + "dagImportError_one": "שגיאות בטעינת Dags", + "dagImportError_other": "שגיאות בטעינת Dags", + "searchByFile": "חפש לפי קובץ", + "timestamp": "חותמת זמן" + }, + "managePools": "ניהול מאגרים", + "noAssetEvents": "אין אירועים בנכסים", + "poolSlots": "סטאטוס מאגרים", + "sortBy": { + "newestFirst": "חדש קודם", + "oldestFirst": "ישן קודם" + }, + "source": "מקור", + "stats": { + "activeDags": "Dags פעיל", + "failedDags": "Dags בכשלון", + "queuedDags": "Dags בתור", + "runningDags": "Dags בריצה", + "stats": "סטטיסטיקות" + }, + "uri": "URI", + "welcome": "ברוכים הבאים" } diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/common.json index 128585f1f9020..a831e1939ee50 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/ko/common.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/common.json @@ -78,6 +78,11 @@ "notFound": "페이지를 찾을 수 없습니다.", "title": "오류" }, + "expand": { + "collapse": "접기", + "expand": "펼치기", + "tooltip": "{{hotkey}}를 눌러 펼치기/접기합니다." + }, "expression": { "all": "모두", "and": "그리고", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/dags.json index 665294e4a01de..53eb878eb7ba5 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/ko/dags.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/dags.json @@ -6,14 +6,21 @@ "warning": "실행 및 작업를 포함하여 Dag과(와) 관련된 모든 메타데이터가 제거됩니다." 
} }, + "favoriteDag": "Dag 즐겨찾기", "filters": { "allRunTypes": "모든 실행 유형", "allStates": "모든 상태", + "favorite": { + "all": "전체", + "favorite": "즐겨찾기", + "unfavorite": "즐겨찾기 해제" + }, "paused": { "active": "활성", "all": "모두", "paused": "일시 중지됨" - } + }, + "runIdPatternFilter": "Dag 실행 검색" }, "ownerLink": "{{owner}}의 소유자 링크", "runAndTaskActions": { @@ -83,5 +90,6 @@ "desc": "다음 Dag 실행으로 정렬 (최신-이전)" }, "placeholder": "정렬 기준" - } + }, + "unfavoriteDag": "Dag 즐겨찾기 해제" } diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/ko/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/ko/dashboard.json index 84e3529ca5cbb..1e030cff52806 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/ko/dashboard.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/ko/dashboard.json @@ -1,4 +1,10 @@ { + "favorite": { + "favoriteDags_one": "{{count}}개의 즐겨찾기 Dag", + "favoriteDags_other": "{{count}}개의 즐겨찾기 Dags", + "noDagRuns": "이 Dag에는 아직 실행된 DagRun이 없습니다.", + "noFavoriteDags": "아직 즐겨찾기가 없습니다. 목록에서 Dag 옆의 별 아이콘을 클릭하여 즐겨찾기에 추가하세요." + }, "group": "그룹", "health": { "dagProcessor": "Dag 프로세서", diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/assets.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/assets.json index 77ca7e65894b9..8456890416b2b 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/pl/assets.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/assets.json @@ -1,5 +1,5 @@ { - "consumingDags": "Przetwarzanie DAG-ów", + "consumingDags": "Przetwarzanie Dagów", "createEvent": { "button": "Utwórz zdarzenie", "manual": { @@ -8,15 +8,15 @@ "label": "Ręcznie" }, "materialize": { - "description": "Wykonaj DAG zależny od tego zasobu", - "descriptionWithDag": "Wykonaj DAG zależny od tego zasobu: {{dagName}}", + "description": "Wykonaj Dag zależny od tego zasobu", + "descriptionWithDag": "Wykonaj Dag zależny od tego zasobu: {{dagName}}", "label": "Materializuj", "unpauseDag": "Wznów {{dagName}} przy wywołaniu" }, "success": { "manualDescription": "Ręczne utworzenie zdarzenia zasobu zakończone sukcesem.", "manualTitle": "Zdarzenie zasobu utworzone", - "materializeDescription": "Zależny DAG {{dagId}} został pomyślnie wywołany.", + "materializeDescription": "Zależny Dag {{dagId}} został pomyślnie wywołany.", "materializeTitle": "Materializowanie zasobu" }, "title": "Utwórz zdarzenie zasobu dla {{name}}" @@ -25,5 +25,6 @@ "lastAssetEvent": "Ostatnie zdarzenie zasobu", "name": "Nazwa", "producingTasks": "Zadania produkujące", + "scheduledDags": "Zaplanowane Dagi", "searchPlaceholder": "Szukaj zasobów" } diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/browse.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/browse.json index 5d078e5d3df74..5066d6dc9df7c 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/pl/browse.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/browse.json @@ -14,7 +14,7 @@ }, "xcom": { "columns": { - "dag": "DAG", + "dag": "Dag", "key": "Klucz", "value": "Wartość" }, diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/common.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/common.json index 5196fbfad3ce0..eb9437fb4191c 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/pl/common.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/common.json @@ -90,6 +90,11 @@ "notFound": "Nie znaleziono strony", "title": "Błąd" }, + "expand": { + "collapse": "Zwiń", + "expand": "Rozwiń", + "tooltip": "Wybierz {{hotkey}} aby przełączyć rozwijanie" + }, "expression": { 
"all": "Wszystkie", "and": "ORAZ", @@ -115,6 +120,7 @@ "dags": "Dagi", "docs": "Pomoc", "home": "Pulpit", + "legacyFabViews": "Widoki Fab z Airflow 2", "plugins": "Wtyczki", "security": "Dostęp" }, diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json index bd634803285bf..41f585e19a598 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/components.json @@ -16,7 +16,7 @@ "missingRuns": "Brakujące wykonania", "reprocessBehavior": "Zachowanie ponownego przetwarzania", "run": "Uruchom ponowne przetwarzanie", - "selectDescription": "Uruchom ten DAG dla zakresu dat", + "selectDescription": "Uruchom ten Dag dla zakresu dat", "selectLabel": "Wypełnienie wsteczne", "title": "Uruchom uzupełnienie wsteczne", "toaster": { @@ -144,7 +144,7 @@ "versionId": "Identyfikator wersji" }, "versionSelect": { - "dagVersion": "Wersja DAG", + "dagVersion": "Wersja Daga", "versionCode": "v{{versionCode}}" } } diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/dags.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/dags.json index 8288fbc42fe4e..6a2a0f0a1e2dc 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/pl/dags.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/dags.json @@ -6,9 +6,15 @@ "warning": "Wszystkie metadane, włączając metadane Dagów, wykonań i zadań zostaną usunięte." } }, + "favoriteDag": "Ulubiony Dag", "filters": { "allRunTypes": "Wszystkie Typy Wykonań", "allStates": "Wszystkie Stany", + "favorite": { + "all": "Wszystkie", + "favorite": "Ulubione", + "unfavorite": "Nieulubione" + }, "paused": { "active": "Aktywne", "all": "Wszystkie", @@ -83,5 +89,6 @@ "desc": "Sortuj według Następnego Wykonania Daga (Najnowsze-Najwcześniejsze)" }, "placeholder": "Sortuj według" - } + }, + "unfavoriteDag": "Usuń Daga z ulubionych" } diff --git a/airflow-core/src/airflow/ui/public/i18n/locales/pl/dashboard.json b/airflow-core/src/airflow/ui/public/i18n/locales/pl/dashboard.json index 72c58e1db72f8..98f1ca40f3f6c 100644 --- a/airflow-core/src/airflow/ui/public/i18n/locales/pl/dashboard.json +++ b/airflow-core/src/airflow/ui/public/i18n/locales/pl/dashboard.json @@ -1,4 +1,12 @@ { + "favorite": { + "favoriteDags_few": "Pierwsze {{count}} ulubione Dagi", + "favoriteDags_many": "Pierwsze {{count}} ulubionych Dagów", + "favoriteDags_one": "Pierwszy ulubiony DAG", + "favoriteDags_other": "Pierwsze {{count}} ulubione Dagi", + "noDagRuns": "Brak uruchomień dla tego Daga.", + "noFavoriteDags": "Brak ulubionych. Kliknij ikonę gwiazdki obok Daga na liście, aby dodać go do ulubionych." + }, "group": "Grupa", "health": { "dagProcessor": "Processor Dagów", diff --git a/airflow-core/src/airflow/ui/src/components/DagActions/FavoriteDagButton.tsx b/airflow-core/src/airflow/ui/src/components/DagActions/FavoriteDagButton.tsx new file mode 100644 index 0000000000000..8f0960fdc2a14 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/components/DagActions/FavoriteDagButton.tsx @@ -0,0 +1,64 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Box } from "@chakra-ui/react"; +import { useCallback, useMemo } from "react"; +import { useTranslation } from "react-i18next"; +import { FiStar } from "react-icons/fi"; + +import { useDagServiceGetDagsUi } from "openapi/queries"; +import { useFavoriteDag } from "src/queries/useFavoriteDag"; +import { useUnfavoriteDag } from "src/queries/useUnfavoriteDag"; + +import ActionButton from "../ui/ActionButton"; + +type FavoriteDagButtonProps = { + readonly dagId: string; + readonly withText?: boolean; +}; + +export const FavoriteDagButton = ({ dagId, withText = true }: FavoriteDagButtonProps) => { + const { t: translate } = useTranslation("dags"); + const { data: favorites } = useDagServiceGetDagsUi({ isFavorite: true }); + + const isFavorite = useMemo( + () => favorites?.dags.some((fav) => fav.dag_id === dagId) ?? false, + [favorites, dagId], + ); + + const { mutate: favoriteDag } = useFavoriteDag(); + const { mutate: unfavoriteDag } = useUnfavoriteDag(); + + const onToggle = useCallback(() => { + const mutationFn = isFavorite ? unfavoriteDag : favoriteDag; + + mutationFn({ dagId }); + }, [dagId, isFavorite, favoriteDag, unfavoriteDag]); + + return ( + + } + onClick={onToggle} + text={isFavorite ? translate("unfavoriteDag") : translate("favoriteDag")} + withText={withText} + /> + + ); +}; diff --git a/airflow-core/src/airflow/ui/src/components/Graph/TaskNode.tsx b/airflow-core/src/airflow/ui/src/components/Graph/TaskNode.tsx index 9aa68ceafcc6c..a3c7f688e798e 100644 --- a/airflow-core/src/airflow/ui/src/components/Graph/TaskNode.tsx +++ b/airflow-core/src/airflow/ui/src/components/Graph/TaskNode.tsx @@ -18,8 +18,8 @@ */ import { Box, Button, Flex, HStack, LinkOverlay, Text } from "@chakra-ui/react"; import type { NodeProps, Node as NodeType } from "@xyflow/react"; +import { useMemo } from "react"; import { useTranslation } from "react-i18next"; -import { CgRedo } from "react-icons/cg"; import { StateBadge } from "src/components/StateBadge"; import TaskInstanceTooltip from "src/components/TaskInstanceTooltip"; @@ -53,6 +53,13 @@ export const TaskNode = ({ toggleGroupId(id); } }; + const thisChildCount = useMemo( + () => + Object.entries(taskInstance?.child_states ?? {}) + .map(([_state, count]) => count) + .reduce((sum, val) => sum + val, 0), + [taskInstance], + ); return ( @@ -82,7 +89,7 @@ export const TaskNode = ({ > {taskInstance.state} - {taskInstance.try_number > 1 ? : undefined} )} {isGroup ? 
( diff --git a/airflow-core/src/airflow/ui/src/components/Graph/reactflowUtils.ts b/airflow-core/src/airflow/ui/src/components/Graph/reactflowUtils.ts index cf704be094a5f..dcfd4dcc41fdc 100644 --- a/airflow-core/src/airflow/ui/src/components/Graph/reactflowUtils.ts +++ b/airflow-core/src/airflow/ui/src/components/Graph/reactflowUtils.ts @@ -19,7 +19,7 @@ import type { Node as FlowNodeType, Edge as FlowEdgeType } from "@xyflow/react"; import type { ElkExtendedEdge } from "elkjs"; -import type { GridTaskInstanceSummary, NodeResponse } from "openapi/requests/types.gen"; +import type { LightGridTaskInstanceSummary, NodeResponse } from "openapi/requests/types.gen"; import type { LayoutNode } from "./useGraphLayout"; @@ -36,7 +36,7 @@ export type CustomNodeProps = { label: string; operator?: string | null; setupTeardownType?: NodeResponse["setup_teardown_type"]; - taskInstance?: GridTaskInstanceSummary; + taskInstance?: LightGridTaskInstanceSummary; type: string; width?: number; }; diff --git a/airflow-core/src/airflow/ui/src/components/TaskInstanceTooltip.tsx b/airflow-core/src/airflow/ui/src/components/TaskInstanceTooltip.tsx index 4ecdc6fc3c23a..ae5df5b5a97e2 100644 --- a/airflow-core/src/airflow/ui/src/components/TaskInstanceTooltip.tsx +++ b/airflow-core/src/airflow/ui/src/components/TaskInstanceTooltip.tsx @@ -20,16 +20,16 @@ import { Box, Text } from "@chakra-ui/react"; import { useTranslation } from "react-i18next"; import type { + LightGridTaskInstanceSummary, TaskInstanceHistoryResponse, TaskInstanceResponse, - GridTaskInstanceSummary, } from "openapi/requests/types.gen"; import Time from "src/components/Time"; import { Tooltip, type TooltipProps } from "src/components/ui"; import { getDuration } from "src/utils"; type Props = { - readonly taskInstance?: GridTaskInstanceSummary | TaskInstanceHistoryResponse | TaskInstanceResponse; + readonly taskInstance?: LightGridTaskInstanceSummary | TaskInstanceHistoryResponse | TaskInstanceResponse; } & Omit; const TaskInstanceTooltip = ({ children, positioning, taskInstance, ...rest }: Props) => { @@ -50,21 +50,23 @@ const TaskInstanceTooltip = ({ children, positioning, taskInstance, ...rest }: P {translate("runId")}: {taskInstance.dag_run_id} ) : undefined} - - {translate("startDate")}: - - - {translate("endDate")}: - - {taskInstance.try_number > 1 && ( - - {translate("tryNumber")}: {taskInstance.try_number} - - )} {"start_date" in taskInstance ? 
( - - {translate("duration")}: {getDuration(taskInstance.start_date, taskInstance.end_date)} - + <> + {taskInstance.try_number > 1 && ( + + {translate("tryNumber")}: {taskInstance.try_number} + + )} + + {translate("startDate")}: + + + {translate("endDate")}: + + + {translate("duration")}: {getDuration(taskInstance.start_date, taskInstance.end_date)} + + ) : undefined} } diff --git a/airflow-core/src/airflow/ui/src/components/TriggerDag/TriggerDAGForm.tsx b/airflow-core/src/airflow/ui/src/components/TriggerDag/TriggerDAGForm.tsx index 9abc7bffe0cfc..16f21590cf17d 100644 --- a/airflow-core/src/airflow/ui/src/components/TriggerDag/TriggerDAGForm.tsx +++ b/airflow-core/src/airflow/ui/src/components/TriggerDag/TriggerDAGForm.tsx @@ -157,7 +157,7 @@ const TriggerDAGForm = ({ dagDisplayName, dagId, isPaused, onClose, open }: Trig onChange={() => setUnpause(!unpause)} wordBreak="break-all" > - {translate("triggerDag.unpause", { dagDisplayName })} + {translate("components:triggerDag.unpause", { dagDisplayName })} ) : undefined} diff --git a/airflow-core/src/airflow/ui/src/components/renderStructuredLog.tsx b/airflow-core/src/airflow/ui/src/components/renderStructuredLog.tsx index 2c410bfd83b57..2ad930cc9bbd1 100644 --- a/airflow-core/src/airflow/ui/src/components/renderStructuredLog.tsx +++ b/airflow-core/src/airflow/ui/src/components/renderStructuredLog.tsx @@ -45,6 +45,8 @@ type RenderStructuredLogProps = { logLevelFilters?: Array; logLink: string; logMessage: string | StructuredLogMessage; + showSource?: boolean; + showTimestamp?: boolean; sourceFilters?: Array; translate: TFunction; }; @@ -95,6 +97,8 @@ export const renderStructuredLog = ({ logLevelFilters, logLink, logMessage, + showSource = true, + showTimestamp = true, sourceFilters, translate, }: RenderStructuredLogProps) => { @@ -127,7 +131,7 @@ export const renderStructuredLog = ({ return ""; } - if (Boolean(timestamp)) { + if (Boolean(timestamp) && showTimestamp) { elements.push("[", , "] "); } @@ -178,14 +182,16 @@ export const renderStructuredLog = ({ , ); - for (const key in reStructured) { - if (Object.hasOwn(reStructured, key)) { - elements.push( - ": ", - - {key === "logger" ? "source" : key}={JSON.stringify(reStructured[key])} - , - ); + if (showSource) { + for (const key in reStructured) { + if (Object.hasOwn(reStructured, key)) { + elements.push( + ": ", + + {key === "logger" ? "source" : key}={JSON.stringify(reStructured[key])} + , + ); + } } } diff --git a/airflow-core/src/airflow/ui/src/constants/searchParams.ts b/airflow-core/src/airflow/ui/src/constants/searchParams.ts index 8e1f9de51ab1f..e5e05cec60060 100644 --- a/airflow-core/src/airflow/ui/src/constants/searchParams.ts +++ b/airflow-core/src/airflow/ui/src/constants/searchParams.ts @@ -19,13 +19,16 @@ export enum SearchParamsKeys { DEPENDENCIES = "dependencies", END_DATE = "end_date", + FAVORITE = "favorite", LAST_DAG_RUN_STATE = "last_dag_run_state", LIMIT = "limit", LOG_LEVEL = "log_level", NAME_PATTERN = "name_pattern", OFFSET = "offset", + OWNERS = "owners", PAUSED = "paused", POOL = "pool", + RUN_ID_PATTERN = "run_id_pattern", RUN_TYPE = "run_type", SORT = "sort", SOURCE = "log_source", diff --git a/airflow-core/src/airflow/ui/src/hooks/usePluginTabs.tsx b/airflow-core/src/airflow/ui/src/hooks/usePluginTabs.tsx new file mode 100644 index 0000000000000..a20b7fc3a0480 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/hooks/usePluginTabs.tsx @@ -0,0 +1,64 @@ +/*! 
+ * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import type { ReactNode } from "react"; +import { LuPlug } from "react-icons/lu"; + +import { usePluginServiceGetPlugins } from "openapi/queries"; +import type { ExternalViewResponse } from "openapi/requests/types.gen"; +import { useColorMode } from "src/context/colorMode"; + +type TabPlugin = { + icon: ReactNode; + label: string; + value: string; +}; + +export const usePluginTabs = (destination: string): Array => { + const { colorMode } = useColorMode(); + const { data: pluginData } = usePluginServiceGetPlugins(); + + // Get external views with the specified destination and ensure they have url_route + const externalViews = + pluginData?.plugins + .flatMap((plugin) => plugin.external_views) + .filter((view: ExternalViewResponse) => view.destination === destination && Boolean(view.url_route)) ?? + []; + + return externalViews.map((view) => { + // Choose icon based on theme - prefer dark mode icon if available and in dark mode + let iconSrc = view.icon; + + if (colorMode === "dark" && view.icon_dark_mode !== undefined && view.icon_dark_mode !== null) { + iconSrc = view.icon_dark_mode; + } + + const icon = + iconSrc !== undefined && iconSrc !== null ? ( + {view.name} + ) : ( + + ); + + return { + icon, + label: view.name, + value: `plugin/${view.url_route}`, + }; + }); +}; diff --git a/airflow-core/src/airflow/ui/src/hooks/useSelectedVersion.ts b/airflow-core/src/airflow/ui/src/hooks/useSelectedVersion.ts index fb33fca1719a0..2ed274ef4b54a 100644 --- a/airflow-core/src/airflow/ui/src/hooks/useSelectedVersion.ts +++ b/airflow-core/src/airflow/ui/src/hooks/useSelectedVersion.ts @@ -82,7 +82,7 @@ const useSelectedVersion = (): number | undefined => { const selectedVersionNumber = selectedVersionUrl ?? - mappedTaskInstanceData?.dag_version?.version_number ?? + (mappedTaskInstanceData ? mappedTaskInstanceData.dag_version.version_number : undefined) ?? (runData?.dag_versions ?? []).at(-1)?.version_number ?? 
dagData?.latest_dag_version?.version_number; diff --git a/airflow-core/src/airflow/ui/src/i18n/config.ts b/airflow-core/src/airflow/ui/src/i18n/config.ts index b8baea6231241..86f25740bfcb0 100644 --- a/airflow-core/src/airflow/ui/src/i18n/config.ts +++ b/airflow-core/src/airflow/ui/src/i18n/config.ts @@ -25,6 +25,7 @@ export const supportedLanguages = [ { code: "en", name: "English" }, { code: "ar", name: "العربية" }, { code: "de", name: "Deutsch" }, + { code: "es", name: "Español" }, { code: "fr", name: "Français" }, { code: "he", name: "עברית" }, { code: "ko", name: "한국어" }, diff --git a/airflow-core/src/airflow/ui/src/layouts/Details/DetailsLayout.tsx b/airflow-core/src/airflow/ui/src/layouts/Details/DetailsLayout.tsx index a7c922454a006..887f2b7406c9d 100644 --- a/airflow-core/src/airflow/ui/src/layouts/Details/DetailsLayout.tsx +++ b/airflow-core/src/airflow/ui/src/layouts/Details/DetailsLayout.tsx @@ -28,7 +28,6 @@ import { Outlet, useParams } from "react-router-dom"; import { useLocalStorage } from "usehooks-ts"; import { useDagServiceGetDag, useDagWarningServiceListDagWarnings } from "openapi/queries"; -import type { DAGResponse } from "openapi/requests/types.gen"; import BackfillBanner from "src/components/Banner/BackfillBanner"; import { SearchDagsButton } from "src/components/SearchDags"; import TriggerDAGButton from "src/components/TriggerDag/TriggerDAGButton"; @@ -46,7 +45,6 @@ import { NavTabs } from "./NavTabs"; import { PanelButtons } from "./PanelButtons"; type Props = { - readonly dag?: DAGResponse; readonly error?: unknown; readonly isLoading?: boolean; readonly tabs: Array<{ icon: ReactNode; label: string; value: string }>; @@ -180,7 +178,7 @@ export const DetailsLayout = ({ children, error, isLoading, tabs }: Props) => { ) : undefined} - + diff --git a/airflow-core/src/airflow/ui/src/layouts/Details/Graph/Graph.tsx b/airflow-core/src/airflow/ui/src/layouts/Details/Graph/Graph.tsx index 4837e18d401ef..2d24b95a40849 100644 --- a/airflow-core/src/airflow/ui/src/layouts/Details/Graph/Graph.tsx +++ b/airflow-core/src/airflow/ui/src/layouts/Details/Graph/Graph.tsx @@ -22,11 +22,7 @@ import "@xyflow/react/dist/style.css"; import { useParams } from "react-router-dom"; import { useLocalStorage } from "usehooks-ts"; -import { - useDagRunServiceGetDagRun, - useGridServiceGridData, - useStructureServiceStructureData, -} from "openapi/queries"; +import { useStructureServiceStructureData } from "openapi/queries"; import { DownloadButton } from "src/components/Graph/DownloadButton"; import { edgeTypes, nodeTypes } from "src/components/Graph/graphTypes"; import type { CustomNodeProps } from "src/components/Graph/reactflowUtils"; @@ -35,7 +31,7 @@ import { useColorMode } from "src/context/colorMode"; import { useOpenGroups } from "src/context/openGroups"; import useSelectedVersion from "src/hooks/useSelectedVersion"; import { useDependencyGraph } from "src/queries/useDependencyGraph"; -import { isStatePending, useAutoRefresh } from "src/utils"; +import { useGridTiSummaries } from "src/queries/useGridTISummaries.ts"; const nodeColor = ( { data: { depth, height, isOpen, taskInstance, width }, type }: ReactFlowNode, @@ -76,7 +72,6 @@ export const Graph = () => { ]); const { openGroupIds } = useOpenGroups(); - const refetchInterval = useAutoRefresh({ dagId }); const [dependencies] = useLocalStorage<"all" | "immediate" | "tasks">(`dependencies-${dagId}`, "tasks"); const [direction] = useLocalStorage(`direction-${dagId}`, "RIGHT"); @@ -93,15 +88,6 @@ export const Graph = () => { enabled: 
dependencies === "all", }); - const { data: dagRun } = useDagRunServiceGetDagRun( - { - dagId, - dagRunId: runId, - }, - undefined, - { enabled: runId !== "" }, - ); - const dagDepEdges = dependencies === "all" ? dagDependencies.edges : []; const dagDepNodes = dependencies === "all" ? dagDependencies.nodes : []; @@ -117,28 +103,11 @@ export const Graph = () => { versionNumber: selectedVersion, }); - // Filter grid data to get only a single dag run - const { data: gridData } = useGridServiceGridData( - { - dagId, - limit: 1, - offset: 0, - runAfterGte: dagRun?.run_after, - runAfterLte: dagRun?.run_after, - }, - undefined, - { - enabled: dagRun !== undefined, - refetchInterval: (query) => - query.state.data?.dag_runs.some((dr) => isStatePending(dr.state)) && refetchInterval, - }, - ); - - const gridRun = gridData?.dag_runs.find((dr) => dr.dag_run_id === runId); + const { data: gridTISummaries } = useGridTiSummaries({ dagId, runId }); // Add task instances to the node data but without having to recalculate how the graph is laid out const nodes = data?.nodes.map((node) => { - const taskInstance = gridRun?.task_instances.find((ti) => ti.task_id === node.id); + const taskInstance = gridTISummaries?.task_instances.find((ti) => ti.task_id === node.id); return { ...node, diff --git a/airflow-core/src/airflow/ui/src/layouts/Details/Grid/Bar.tsx b/airflow-core/src/airflow/ui/src/layouts/Details/Grid/Bar.tsx index 933a0490ef9eb..3ddcdee64a5de 100644 --- a/airflow-core/src/airflow/ui/src/layouts/Details/Grid/Bar.tsx +++ b/airflow-core/src/airflow/ui/src/layouts/Details/Grid/Bar.tsx @@ -65,7 +65,7 @@ export const Bar = ({ max, nodes, run }: Props) => { color="white" dagId={dagId} flexDir="column" - height={`${((run.duration ?? 0) / max) * BAR_HEIGHT}px`} + height={`${(run.duration / max) * BAR_HEIGHT}px`} justifyContent="flex-end" label={run.run_after} minHeight="14px" diff --git a/airflow-core/src/airflow/ui/src/layouts/Details/Grid/TaskNames.tsx b/airflow-core/src/airflow/ui/src/layouts/Details/Grid/TaskNames.tsx index 014ac8f5fd427..fa70ab2d16b37 100644 --- a/airflow-core/src/airflow/ui/src/layouts/Details/Grid/TaskNames.tsx +++ b/airflow-core/src/airflow/ui/src/layouts/Details/Grid/TaskNames.tsx @@ -68,7 +68,7 @@ export const TaskNames = ({ nodes }: Props) => { > {node.isGroup ? 
( - + { [useDagServiceGetDagsUiKey], UseDagRunServiceGetDagRunsKeyFn({ dagId }, [{ dagId }]), UseTaskInstanceServiceGetTaskInstancesKeyFn({ dagId, dagRunId: "~" }, [{ dagId, dagRunId: "~" }]), - UseGridServiceGridDataKeyFn({ dagId }, [{ dagId }]), ]; toaster.create({ diff --git a/airflow-core/src/airflow/ui/src/pages/Dag/Dag.tsx b/airflow-core/src/airflow/ui/src/pages/Dag/Dag.tsx index cca182140b8a7..dbc9b84e12e7f 100644 --- a/airflow-core/src/airflow/ui/src/pages/Dag/Dag.tsx +++ b/airflow-core/src/airflow/ui/src/pages/Dag/Dag.tsx @@ -28,6 +28,7 @@ import { useParams } from "react-router-dom"; import { useDagServiceGetDagDetails, useDagServiceGetDagsUi } from "openapi/queries"; import type { DAGWithLatestDagRunsResponse } from "openapi/requests/types.gen"; import { TaskIcon } from "src/assets/TaskIcon"; +import { usePluginTabs } from "src/hooks/usePluginTabs"; import { DetailsLayout } from "src/layouts/Details/DetailsLayout"; import { useRefreshOnNewDagRuns } from "src/queries/useRefreshOnNewDagRuns"; import { isStatePending, useAutoRefresh } from "src/utils"; @@ -38,6 +39,9 @@ export const Dag = () => { const { t: translate } = useTranslation("dag"); const { dagId = "" } = useParams(); + // Get external views with dag destination + const externalTabs = usePluginTabs("dag"); + const tabs = [ { icon: , label: translate("tabs.overview"), value: "" }, { icon: , label: translate("tabs.runs"), value: "runs" }, @@ -46,6 +50,7 @@ export const Dag = () => { { icon: , label: translate("tabs.auditLog"), value: "events" }, { icon: , label: translate("tabs.code"), value: "code" }, { icon: , label: translate("tabs.details"), value: "details" }, + ...externalTabs, ]; const { @@ -93,12 +98,7 @@ export const Dag = () => { return ( - +
)} + ) diff --git a/airflow-core/src/airflow/ui/src/pages/DagRuns.tsx b/airflow-core/src/airflow/ui/src/pages/DagRuns.tsx index 3e46a185be366..3afeab6ded11c 100644 --- a/airflow-core/src/airflow/ui/src/pages/DagRuns.tsx +++ b/airflow-core/src/airflow/ui/src/pages/DagRuns.tsx @@ -18,13 +18,12 @@ * specific language governing permissions and limitations * under the License. */ -import { Flex, HStack, Link, type SelectValueChangeDetails, Text } from "@chakra-ui/react"; +import { Flex, HStack, Link, type SelectValueChangeDetails, Text, Box } from "@chakra-ui/react"; import type { ColumnDef } from "@tanstack/react-table"; import type { TFunction } from "i18next"; import { useCallback } from "react"; import { useTranslation } from "react-i18next"; import { Link as RouterLink, useParams, useSearchParams } from "react-router-dom"; -import { useLocalStorage } from "usehooks-ts"; import { useDagRunServiceGetDagRuns } from "openapi/queries"; import type { DAGRunResponse, DagRunState, DagRunType } from "openapi/requests/types.gen"; @@ -37,6 +36,7 @@ import { LimitedItemsList } from "src/components/LimitedItemsList"; import { MarkRunAsButton } from "src/components/MarkAs"; import RenderedJsonField from "src/components/RenderedJsonField"; import { RunTypeIcon } from "src/components/RunTypeIcon"; +import { SearchBar } from "src/components/SearchBar"; import { StateBadge } from "src/components/StateBadge"; import Time from "src/components/Time"; import { Select } from "src/components/ui"; @@ -48,6 +48,7 @@ import { renderDuration, useAutoRefresh, isStatePending } from "src/utils"; type DagRunRow = { row: { original: DAGRunResponse } }; const { END_DATE: END_DATE_PARAM, + RUN_ID_PATTERN: RUN_ID_PATTERN_PARAM, RUN_TYPE: RUN_TYPE_PARAM, START_DATE: START_DATE_PARAM, STATE: STATE_PARAM, @@ -157,28 +158,30 @@ export const DagRuns = () => { const [sort] = sorting; const orderBy = sort ? `${sort.desc ? "-" : ""}${sort.id}` : "-run_after"; + const { pageIndex, pageSize } = pagination; const filteredState = searchParams.get(STATE_PARAM); const filteredType = searchParams.get(RUN_TYPE_PARAM); + const filteredRunIdPattern = searchParams.get(RUN_ID_PATTERN_PARAM); const startDate = searchParams.get(START_DATE_PARAM); const endDate = searchParams.get(END_DATE_PARAM); const refetchInterval = useAutoRefresh({}); - const [limit] = useLocalStorage(`dag_runs_limit-${dagId}`, 10); const { data, error, isLoading } = useDagRunServiceGetDagRuns( { dagId: dagId ?? "~", endDateLte: endDate ?? undefined, - limit, - offset: pagination.pageIndex * pagination.pageSize, + limit: pageSize, + offset: pageIndex * pageSize, orderBy, + runIdPattern: filteredRunIdPattern ?? undefined, runType: filteredType === null ? undefined : [filteredType], startDateGte: startDate ?? undefined, state: filteredState === null ? undefined : [filteredState], }, undefined, { - enabled: !isNaN(pagination.pageSize), + enabled: !isNaN(pageSize), refetchInterval: (query) => query.state.data?.dag_runs.some((run) => isStatePending(run.state)) ? 
refetchInterval : false, }, @@ -220,9 +223,34 @@ export const DagRuns = () => { [pagination, searchParams, setSearchParams, setTableURLState, sorting], ); + const handleRunIdPatternChange = useCallback( + (value: string) => { + if (value === "") { + searchParams.delete(RUN_ID_PATTERN_PARAM); + } else { + searchParams.set(RUN_ID_PATTERN_PARAM, value); + } + setTableURLState({ + pagination: { ...pagination, pageIndex: 0 }, + sorting, + }); + setSearchParams(searchParams); + }, + [pagination, searchParams, setSearchParams, setTableURLState, sorting], + ); + return ( <> - + + + + { ))} + - + {() => filteredType === null ? ( @@ -289,7 +318,7 @@ export const DagRuns = () => { ))} - + { pr={2} /> + diff --git a/airflow-core/src/airflow/ui/src/pages/DagsList/DagOwners.tsx b/airflow-core/src/airflow/ui/src/pages/DagsList/DagOwners.tsx index e347666759c24..5eb12f3d1eade 100644 --- a/airflow-core/src/airflow/ui/src/pages/DagsList/DagOwners.tsx +++ b/airflow-core/src/airflow/ui/src/pages/DagsList/DagOwners.tsx @@ -16,8 +16,9 @@ * specific language governing permissions and limitations * under the License. */ -import { Link, Text } from "@chakra-ui/react"; +import { Link } from "@chakra-ui/react"; import { useTranslation } from "react-i18next"; +import { Link as RouterLink } from "react-router-dom"; import { LimitedItemsList } from "src/components/LimitedItemsList"; @@ -33,14 +34,15 @@ export const DagOwners = ({ }) => { const { t: translate } = useTranslation("dags"); const items = owners.map((owner) => { - const link = ownerLinks?.[owner]; - const hasOwnerLink = link !== undefined; + const ownerLink = ownerLinks?.[owner]; + const ownerFilterLink = `/dags?owners=${owner}`; + const hasOwnerLink = ownerLink !== undefined; return hasOwnerLink ? ( ) : ( - - {owner} - + + {owner} + ); }); diff --git a/airflow-core/src/airflow/ui/src/pages/DagsList/DagsFilters/DagsFilters.tsx b/airflow-core/src/airflow/ui/src/pages/DagsList/DagsFilters/DagsFilters.tsx index 958f3d68997b4..9803124673a24 100644 --- a/airflow-core/src/airflow/ui/src/pages/DagsList/DagsFilters/DagsFilters.tsx +++ b/airflow-core/src/airflow/ui/src/pages/DagsList/DagsFilters/DagsFilters.tsx @@ -26,12 +26,14 @@ import { SearchParamsKeys, type SearchParamsKeysType } from "src/constants/searc import { useConfig } from "src/queries/useConfig"; import { useDagTagsInfinite } from "src/queries/useDagTagsInfinite"; +import { FavoriteFilter } from "./FavoriteFilter"; import { PausedFilter } from "./PausedFilter"; import { ResetButton } from "./ResetButton"; import { StateFilters } from "./StateFilters"; import { TagFilter } from "./TagFilter"; const { + FAVORITE: FAVORITE_PARAM, LAST_DAG_RUN_STATE: LAST_DAG_RUN_STATE_PARAM, OFFSET: OFFSET_PARAM, PAUSED: PAUSED_PARAM, @@ -39,7 +41,14 @@ const { TAGS_MATCH_MODE: TAGS_MATCH_MODE_PARAM, }: SearchParamsKeysType = SearchParamsKeys; -const getFilterCount = (state: string | null, showPaused: string | null, selectedTags: Array) => { +type FilterOptions = { + selectedTags: Array; + showFavorites: string | null; + showPaused: string | null; + state: string | null; +}; + +const getFilterCount = ({ selectedTags, showFavorites, showPaused, state }: FilterOptions) => { let count = 0; if (state !== null) { @@ -51,6 +60,9 @@ const getFilterCount = (state: string | null, showPaused: string | null, selecte if (selectedTags.length > 0) { count += 1; } + if (showFavorites !== null) { + count += 1; + } return count; }; @@ -59,6 +71,7 @@ export const DagsFilters = () => { const [searchParams, setSearchParams] = 
useSearchParams(); const showPaused = searchParams.get(PAUSED_PARAM); + const showFavorites = searchParams.get(FAVORITE_PARAM); const state = searchParams.get(LAST_DAG_RUN_STATE_PARAM); const selectedTags = searchParams.getAll(TAGS_PARAM); const tagFilterMode = searchParams.get(TAGS_MATCH_MODE_PARAM) ?? "any"; @@ -100,6 +113,25 @@ export const DagsFilters = () => { [pagination, searchParams, setSearchParams, setTableURLState, sorting], ); + const handleFavoriteChange = useCallback( + ({ value }: { value: Array }) => { + const [val] = value; + + if (val === undefined || val === "all") { + searchParams.delete(FAVORITE_PARAM); + } else { + searchParams.set(FAVORITE_PARAM, val); + } + setTableURLState({ + pagination: { ...pagination, pageIndex: 0 }, + sorting, + }); + searchParams.delete(OFFSET_PARAM); + setSearchParams(searchParams); + }, + [pagination, searchParams, setSearchParams, setTableURLState, sorting], + ); + const handleStateChange: React.MouseEventHandler = useCallback( ({ currentTarget: { value } }) => { if (value === "all") { @@ -139,6 +171,7 @@ export const DagsFilters = () => { const onClearFilters = () => { searchParams.delete(PAUSED_PARAM); + searchParams.delete(FAVORITE_PARAM); searchParams.delete(LAST_DAG_RUN_STATE_PARAM); searchParams.delete(TAGS_PARAM); searchParams.delete(TAGS_MATCH_MODE_PARAM); @@ -157,7 +190,12 @@ export const DagsFilters = () => { [searchParams, setSearchParams], ); - const filterCount = getFilterCount(state, showPaused, selectedTags); + const filterCount = getFilterCount({ + selectedTags, + showFavorites, + showPaused, + state, + }); return ( @@ -188,6 +226,7 @@ export const DagsFilters = () => { tagFilterMode={tagFilterMode} tags={data?.pages.flatMap((dagResponse) => dagResponse.tags) ?? []} /> + diff --git a/airflow-core/src/airflow/ui/src/pages/DagsList/DagsFilters/FavoriteFilter.tsx b/airflow-core/src/airflow/ui/src/pages/DagsList/DagsFilters/FavoriteFilter.tsx new file mode 100644 index 0000000000000..6b356c3a8bfce --- /dev/null +++ b/airflow-core/src/airflow/ui/src/pages/DagsList/DagsFilters/FavoriteFilter.tsx @@ -0,0 +1,58 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
diff --git a/airflow-core/src/airflow/ui/src/pages/DagsList/DagsFilters/FavoriteFilter.tsx b/airflow-core/src/airflow/ui/src/pages/DagsList/DagsFilters/FavoriteFilter.tsx new file mode 100644 index 0000000000000..6b356c3a8bfce --- /dev/null +++ b/airflow-core/src/airflow/ui/src/pages/DagsList/DagsFilters/FavoriteFilter.tsx @@ -0,0 +1,58 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { createListCollection, type SelectValueChangeDetails } from "@chakra-ui/react"; +import { useTranslation } from "react-i18next"; + +import { Select } from "src/components/ui"; + +type Props = { + readonly onFavoriteChange: (details: SelectValueChangeDetails) => void; + readonly showFavorites: string | null; +}; + +export const FavoriteFilter = ({ onFavoriteChange, showFavorites }: Props) => { + const { t: translate } = useTranslation("dags"); + + const enabledOptions = createListCollection({ + items: [ + { label: translate("filters.favorite.all"), value: "all" }, + { label: translate("filters.favorite.favorite"), value: "true" }, + { label: translate("filters.favorite.unfavorite"), value: "false" }, + ], + }); + + return ( + <Select.Root collection={enabledOptions} onValueChange={onFavoriteChange} value={[showFavorites ?? "all"]}> + <Select.Trigger> + <Select.ValueText placeholder={translate("filters.favorite.all")} /> + </Select.Trigger> + <Select.Content> + {enabledOptions.items.map((option) => ( + <Select.Item item={option} key={option.value}> + {option.label} + </Select.Item> + ))} + </Select.Content> + </Select.Root> + ); +}; diff --git a/airflow-core/src/airflow/ui/src/pages/DagsList/DagsList.tsx b/airflow-core/src/airflow/ui/src/pages/DagsList/DagsList.tsx index 046fe5d14aea6..f08a1fd0e5132 100644 --- a/airflow-core/src/airflow/ui/src/pages/DagsList/DagsList.tsx +++ b/airflow-core/src/airflow/ui/src/pages/DagsList/DagsList.tsx @@ -1,3 +1,5 @@ +/* eslint-disable max-lines */ + /*! * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file @@ -33,6 +35,7 @@ import { useLocalStorage } from "usehooks-ts"; import type { DagRunState, DAGWithLatestDagRunsResponse } from "openapi/requests/types.gen"; import DeleteDagButton from "src/components/DagActions/DeleteDagButton"; +import { FavoriteDagButton } from "src/components/DagActions/FavoriteDagButton"; import DagRunInfo from "src/components/DagRunInfo"; import { DataTable } from "src/components/DataTable"; import { ToggleTableDisplay } from "src/components/DataTable/ToggleTableDisplay"; @@ -42,7 +45,7 @@ import { ErrorAlert } from "src/components/ErrorAlert"; import { SearchBar } from "src/components/SearchBar"; import { TogglePause } from "src/components/TogglePause"; import TriggerDAGButton from "src/components/TriggerDag/TriggerDAGButton"; -import { SearchParamsKeys, type SearchParamsKeysType } from "src/constants/searchParams"; +import { SearchParamsKeys } from "src/constants/searchParams"; import { DagsLayout } from "src/layouts/DagsLayout"; import { useConfig } from "src/queries/useConfig"; import { useDags } from "src/queries/useDags"; @@ -132,6 +135,13 @@ const createColumns = ( enableSorting: false, header: "", }, + { + accessorKey: "favorite", + cell: ({ row: { original } }) => <FavoriteDagButton dagId={original.dag_id} />, + enableHiding: false, + enableSorting: false, + header: "", + }, { accessorKey: "delete", cell: ({ row: { original } }) => ( @@ -142,13 +152,8 @@ const createColumns = ( }, ]; -const { - LAST_DAG_RUN_STATE: LAST_DAG_RUN_STATE_PARAM, - NAME_PATTERN: NAME_PATTERN_PARAM, - PAUSED: PAUSED_PARAM, - TAGS: TAGS_PARAM, - TAGS_MATCH_MODE: TAGS_MATCH_MODE_PARAM, -}: SearchParamsKeysType = SearchParamsKeys; +const { FAVORITE, LAST_DAG_RUN_STATE, NAME_PATTERN, OWNERS, PAUSED, TAGS, TAGS_MATCH_MODE } = + SearchParamsKeys; const cardDef: CardDef<DAGWithLatestDagRunsResponse> = { card: ({ row }) => <DagCard dag={row} />, @@ -168,17 +173,19 @@ export const DagsList = () => { const hidePausedDagsByDefault = Boolean(useConfig("hide_paused_dags_by_default")); const defaultShowPaused = hidePausedDagsByDefault ?
false : undefined; - const showPaused = searchParams.get(PAUSED_PARAM); + const showPaused = searchParams.get(PAUSED); + const showFavorites = searchParams.get(FAVORITE); - const lastDagRunState = searchParams.get(LAST_DAG_RUN_STATE_PARAM) as DagRunState; - const selectedTags = searchParams.getAll(TAGS_PARAM); - const selectedMatchMode = searchParams.get(TAGS_MATCH_MODE_PARAM) as "all" | "any"; + const lastDagRunState = searchParams.get(LAST_DAG_RUN_STATE) as DagRunState; + const selectedTags = searchParams.getAll(TAGS); + const selectedMatchMode = searchParams.get(TAGS_MATCH_MODE) as "all" | "any"; + const owners = searchParams.getAll(OWNERS); const { setTableURLState, tableURLState } = useTableURLState(); const { pagination, sorting } = tableURLState; const [dagDisplayNamePattern, setDagDisplayNamePattern] = useState( - searchParams.get(NAME_PATTERN_PARAM) ?? undefined, + searchParams.get(NAME_PATTERN) ?? undefined, ); const [sort] = sorting; @@ -188,9 +195,9 @@ export const DagsList = () => { const handleSearchChange = (value: string) => { if (value) { - searchParams.set(NAME_PATTERN_PARAM, value); + searchParams.set(NAME_PATTERN, value); } else { - searchParams.delete(NAME_PATTERN_PARAM); + searchParams.delete(NAME_PATTERN); } setSearchParams(searchParams); setTableURLState({ @@ -201,6 +208,7 @@ export const DagsList = () => { }; let paused = defaultShowPaused; + let isFavorite = undefined; if (showPaused === "all") { paused = undefined; @@ -210,13 +218,21 @@ export const DagsList = () => { paused = false; } + if (showFavorites === "true") { + isFavorite = true; + } else if (showFavorites === "false") { + isFavorite = false; + } + const { data, error, isLoading } = useDags({ dagDisplayNamePattern: Boolean(dagDisplayNamePattern) ? `${dagDisplayNamePattern}` : undefined, dagRunsLimit, + isFavorite, lastDagRunState, limit: pagination.pageSize, offset: pagination.pageIndex * pagination.pageSize, orderBy, + owners, paused, tags: selectedTags, tagsMatchMode: selectedMatchMode, diff --git a/airflow-core/src/airflow/ui/src/pages/Dashboard/Dashboard.tsx b/airflow-core/src/airflow/ui/src/pages/Dashboard/Dashboard.tsx index 18e69dc5e9492..964b62d056b40 100644 --- a/airflow-core/src/airflow/ui/src/pages/Dashboard/Dashboard.tsx +++ b/airflow-core/src/airflow/ui/src/pages/Dashboard/Dashboard.tsx @@ -24,6 +24,7 @@ import ReactMarkdown from "src/components/ReactMarkdown"; import { Accordion, Alert } from "src/components/ui"; import { useConfig } from "src/queries/useConfig"; +import { FavoriteDags } from "./FavoriteDags"; import { Health } from "./Health"; import { HistoricalMetrics } from "./HistoricalMetrics"; import { PoolSummary } from "./PoolSummary"; @@ -64,6 +65,9 @@ export const Dashboard = () => { + + +
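For orientation, the two new files that follow compose the dashboard side of the feature: Dashboard mounts FavoriteDags, which fetches up to ten favorite DAGs and renders one FavoriteDagCard per result. A hedged sketch of that mapping in TypeScript — the grid shape is a placeholder, and the response field names (dag_display_name, latest_dag_runs) are assumptions based on DAGWithLatestDagRunsResponse from the generated client, not text from the patch:

import { SimpleGrid } from "@chakra-ui/react";

import type { DAGWithLatestDagRunsResponse } from "openapi/requests/types.gen";

import { FavoriteDagCard } from "./FavoriteDagCard";

// Sketch only: one card per fetched favorite, keyed by dag_id. Card props
// match the FavoriteDagProps type defined in FavoriteDagCard.tsx below.
export const FavoriteDagGrid = ({ dags }: { readonly dags: Array<DAGWithLatestDagRunsResponse> }) => (
  <SimpleGrid columns={{ base: 2, lg: 5 }} gap={2}>
    {dags.map((dag) => (
      <FavoriteDagCard
        dagId={dag.dag_id}
        dagName={dag.dag_display_name}
        key={dag.dag_id}
        latestRuns={dag.latest_dag_runs}
      />
    ))}
  </SimpleGrid>
);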
diff --git a/airflow-core/src/airflow/ui/src/pages/Dashboard/FavoriteDags/FavoriteDagCard.tsx b/airflow-core/src/airflow/ui/src/pages/Dashboard/FavoriteDags/FavoriteDagCard.tsx new file mode 100644 index 0000000000000..2bbf4de24f316 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/pages/Dashboard/FavoriteDags/FavoriteDagCard.tsx @@ -0,0 +1,69 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Box, Text, VStack } from "@chakra-ui/react"; +import { useTranslation } from "react-i18next"; +import { Link } from "react-router-dom"; + +import type { DAGRunResponse } from "openapi/requests/types.gen"; +import { RecentRuns } from "src/pages/DagsList/RecentRuns"; + +type FavoriteDagProps = { + readonly dagId: string; + readonly dagName: string; + readonly latestRuns: Array<DAGRunResponse>; +}; + +export const FavoriteDagCard = ({ dagId, dagName, latestRuns }: FavoriteDagProps) => { + const { t: translate } = useTranslation("dashboard"); + + return ( + + + {latestRuns.length > 0 ? ( + ) : ( + + {translate("favorite.noDagRuns")} + + )} + + + {dagName} + + + + + ); +}; diff --git a/airflow-core/src/airflow/ui/src/pages/Dashboard/FavoriteDags/FavoriteDags.tsx b/airflow-core/src/airflow/ui/src/pages/Dashboard/FavoriteDags/FavoriteDags.tsx new file mode 100644 index 0000000000000..2f54d45056680 --- /dev/null +++ b/airflow-core/src/airflow/ui/src/pages/Dashboard/FavoriteDags/FavoriteDags.tsx @@ -0,0 +1,63 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Box, Flex, Heading, SimpleGrid, Text } from "@chakra-ui/react"; +import { useTranslation } from "react-i18next"; +import { FiStar } from "react-icons/fi"; + +import { useDagServiceGetDagsUi } from "openapi/queries"; + +import { FavoriteDagCard } from "./FavoriteDagCard"; + +export const FavoriteDags = () => { + const { t: translate } = useTranslation("dashboard"); + const LIMIT = 10; + const { data: favorites } = useDagServiceGetDagsUi({ isFavorite: true, limit: LIMIT }); + + if (!favorites) { + return undefined; + } + + return ( + + + + + {translate("favorite.favoriteDags", { count: LIMIT })} + + + + {favorites.dags.length === 0 ? ( + + {translate("favorite.noFavoriteDags")} + + ) : ( + + {favorites.dags.map((dag) => ( + + ))} + + )} + + ); +}; diff --git a/airflow-core/src/airflow/ui/src/pages/Dashboard/FavoriteDags/index.ts b/airflow-core/src/airflow/ui/src/pages/Dashboard/FavoriteDags/index.ts new file mode 100644 index 0000000000000..4c8f5fd306a0f --- /dev/null +++ b/airflow-core/src/airflow/ui/src/pages/Dashboard/FavoriteDags/index.ts @@ -0,0 +1,20 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements.
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export { FavoriteDags } from "./FavoriteDags"; diff --git a/airflow-core/src/airflow/ui/src/pages/Iframe.tsx b/airflow-core/src/airflow/ui/src/pages/Iframe.tsx index bac9843ce85b6..91b804ed550a3 100644 --- a/airflow-core/src/airflow/ui/src/pages/Iframe.tsx +++ b/airflow-core/src/airflow/ui/src/pages/Iframe.tsx @@ -27,7 +27,7 @@ import { ErrorPage } from "./Error"; export const Iframe = ({ sandbox = "allow-same-origin allow-forms" }: { readonly sandbox: string }) => { const { t: translate } = useTranslation(); - const { page } = useParams(); + const { dagId, mapIndex, page, runId, taskId } = useParams(); const { data: pluginData, isLoading } = usePluginServiceGetPlugins(); const iframeView = @@ -49,12 +49,41 @@ export const Iframe = ({ sandbox = "allow-same-origin allow-forms" }: { readonly return ; } + // Build the href URL with context parameters if the view has a destination + let src = iframeView.href; + + if (iframeView.destination !== undefined && iframeView.destination !== "nav") { + // Check if the href contains placeholders that need to be replaced + if (dagId !== undefined) { + src = src.replaceAll("{DAG_ID}", dagId); + } + if (runId !== undefined) { + src = src.replaceAll("{RUN_ID}", runId); + } + if (taskId !== undefined) { + src = src.replaceAll("{TASK_ID}", taskId); + } + if (mapIndex !== undefined) { + src = src.replaceAll("{MAP_INDEX}", mapIndex); + } + } + return ( - +
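The Iframe.tsx hunk above is the whole of the external-view templating mechanism: when a plugin view has a destination other than "nav", four well-known tokens in its href are substituted from the current route params. A self-contained TypeScript restatement of that logic — the helper name and the sample Grafana URL are invented for illustration, while the token names and replaceAll calls mirror the diff:

const expandExternalViewHref = (
  href: string,
  params: { dagId?: string; mapIndex?: string; runId?: string; taskId?: string },
): string => {
  let src = href;

  // Each placeholder is only replaced when the route actually provides the
  // value, matching the undefined-guards in Iframe.tsx above.
  if (params.dagId !== undefined) {
    src = src.replaceAll("{DAG_ID}", params.dagId);
  }
  if (params.runId !== undefined) {
    src = src.replaceAll("{RUN_ID}", params.runId);
  }
  if (params.taskId !== undefined) {
    src = src.replaceAll("{TASK_ID}", params.taskId);
  }
  if (params.mapIndex !== undefined) {
    src = src.replaceAll("{MAP_INDEX}", params.mapIndex);
  }

  return src;
};

// Example: an external view registered with
//   href: "https://grafana.example.com/d/airflow?dag={DAG_ID}&run={RUN_ID}"
// rendered at dagId="my_dag", runId="manual__2025-01-01" resolves to
//   "https://grafana.example.com/d/airflow?dag=my_dag&run=manual__2025-01-01"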