diff --git a/.github/.pre-commit-config.yaml b/.github/.pre-commit-config.yaml new file mode 100644 index 0000000000000..909f0c1cdca3c --- /dev/null +++ b/.github/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +--- +default_stages: [manual] +default_language_version: + python: python311 +minimum_pre_commit_version: '3.2.0' +repos: + - repo: https://github.com/eclipse-csi/octopin + rev: 21360742e352e87450f99e180fdfc2cf774a72a3 + hooks: + - id: pin-versions + name: Pin versions of dependencies in CI workflows (manual) + stages: ['manual'] diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 7a97a52539cac..69f10c58301a7 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -23,7 +23,7 @@ # API /airflow-core/src/airflow/api/ @ephraimbuddy @pierrejeambrun @rawwar @jason810496 -/airflow-core/src/airflow/api_fastapi/ @ephraimbuddy @pierrejeambrun @rawwar @jason810496 @bugraoz93 +/airflow-core/src/airflow/api_fastapi/ @ephraimbuddy @pierrejeambrun @rawwar @jason810496 @bugraoz93 @shubhamraj-git /airflow-core/src/airflow/api_fastapi/execution_api/ @ashb @kaxil @amoghrajesh # Airflow CTL @@ -33,7 +33,18 @@ /airflow-core/src/airflow/api_fastapi/auth/ @vincbeck # UI 
-/airflow-core/src/airflow/ui/ @bbovenzi @pierrejeambrun @ryanahamilton @jscheffl +/airflow-core/src/airflow/ui/ @bbovenzi @pierrejeambrun @ryanahamilton @jscheffl @shubhamraj-git + +# Translation Owners (i18n) +# Note: Non committer engaged translators are listed in comments to prevent making file syntax invalid +# See: https://github.com/apache/airflow/blob/main/airflow-core/src/airflow/ui/public/i18n/README.md#43-engaged-translator +airflow-core/src/airflow/ui/public/i18n/locales/ar/ @shahar1 @hussein-awala # + @ahmadtfarhan +airflow-core/src/airflow/ui/public/i18n/locales/de/ @jscheffl # + @TJaniF @m1racoli +airflow-core/src/airflow/ui/public/i18n/locales/he/ @eladkal @shahar1 @romsharon98 # + @Dev-iL +airflow-core/src/airflow/ui/public/i18n/locales/nl/ @BasPH # + @DjVinnii +airflow-core/src/airflow/ui/public/i18n/locales/pl/ @potiuk @mobuchowski # + @kacpermuda +airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/ @Lee-W @jason810496 # + @RoyLee1224 @guan404ming +airflow-core/src/airflow/ui/public/i18n/locales/fr/ @pierrejeambrun @vincbeck # Security/Permissions /airflow-core/src/airflow/security/permissions.py @vincbeck @@ -69,6 +80,7 @@ /providers/edge3/ @jscheffl /providers/fab/ @vincbeck /providers/hashicorp/ @hussein-awala +/providers/keycloak/ @vincbeck /providers/openlineage/ @mobuchowski /providers/slack/ @eladkal /providers/smtp/ @hussein-awala @@ -77,7 +89,7 @@ # Dev tools /.github/workflows/ @potiuk @ashb @gopidesupavan -/dev/ @potiuk @ashb @jedcunningham @gopidesupavan +/dev/ @potiuk @ashb @jedcunningham @gopidesupavan @amoghrajesh /docker-tests/ @potiuk @ashb @gopidesupavan @jason810496 /kubernetes-tests/ @potiuk @ashb @gopidesupavan @jason810496 /helm-tests/ @dstandish @jedcunningham @@ -109,4 +121,8 @@ ISSUE_TRIAGE_PROCESS.rst @eladkal /providers/fab/src/airflow-core/src/airflow/providers/fab/migrations/ @ephraimbuddy # AIP-72 - Task SDK +# Python SDK /task-sdk/ @ashb @kaxil @amoghrajesh + +# Golang SDK +/go-sdk/ @ashb @kaxil @amoghrajesh diff
--git a/.github/ISSUE_TEMPLATE/airflow_bug_report.yml b/.github/ISSUE_TEMPLATE/1-airflow_bug_report.yml similarity index 99% rename from .github/ISSUE_TEMPLATE/airflow_bug_report.yml rename to .github/ISSUE_TEMPLATE/1-airflow_bug_report.yml index bd038baf6346b..862037f29bac9 100644 --- a/.github/ISSUE_TEMPLATE/airflow_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/1-airflow_bug_report.yml @@ -25,7 +25,7 @@ body: the latest release or main to see if the issue is fixed before reporting it. multiple: false options: - - "3.0.1" + - "3.0.2" - "2.11.0" - "main (development)" - "Other Airflow 2 version (please specify below)" diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/2-feature_request.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/feature_request.yml rename to .github/ISSUE_TEMPLATE/2-feature_request.yml diff --git a/.github/ISSUE_TEMPLATE/airflow_providers_bug_report.yml b/.github/ISSUE_TEMPLATE/3-airflow_providers_bug_report.yml similarity index 99% rename from .github/ISSUE_TEMPLATE/airflow_providers_bug_report.yml rename to .github/ISSUE_TEMPLATE/3-airflow_providers_bug_report.yml index a2461de6285e7..844ea18ea4d89 100644 --- a/.github/ISSUE_TEMPLATE/airflow_providers_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/3-airflow_providers_bug_report.yml @@ -41,6 +41,7 @@ body: - apache-pig - apache-pinot - apache-spark + - apache-tinkerpop - apprise - arangodb - asana @@ -75,6 +76,7 @@ body: - influxdb - jdbc - jenkins + - keycloak - microsoft-azure - microsoft-mssql - microsoft-psrp diff --git a/.github/ISSUE_TEMPLATE/airflow_helmchart_bug_report.yml b/.github/ISSUE_TEMPLATE/4-airflow_helmchart_bug_report.yml similarity index 99% rename from .github/ISSUE_TEMPLATE/airflow_helmchart_bug_report.yml rename to .github/ISSUE_TEMPLATE/4-airflow_helmchart_bug_report.yml index 3c8b7e68a82bf..bb8abfe40a9ee 100644 --- a/.github/ISSUE_TEMPLATE/airflow_helmchart_bug_report.yml +++ 
b/.github/ISSUE_TEMPLATE/4-airflow_helmchart_bug_report.yml @@ -28,7 +28,8 @@ body: What Apache Airflow Helm Chart version are you using? multiple: false options: - - "1.16.0 (latest released)" + - "1.17.0 (latest released)" + - "1.16.0" - "1.15.0" - "1.14.0" - "1.13.1" diff --git a/.github/ISSUE_TEMPLATE/airflow_doc_issue_report.yml b/.github/ISSUE_TEMPLATE/5-airflow_doc_issue_report.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/airflow_doc_issue_report.yml rename to .github/ISSUE_TEMPLATE/5-airflow_doc_issue_report.yml diff --git a/.github/ISSUE_TEMPLATE/~free_form.yml b/.github/ISSUE_TEMPLATE/6-free_form.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/~free_form.yml rename to .github/ISSUE_TEMPLATE/6-free_form.yml diff --git a/.github/actions/install-pre-commit/action.yml b/.github/actions/install-pre-commit/action.yml index 5289389930c15..e053e8e90571c 100644 --- a/.github/actions/install-pre-commit/action.yml +++ b/.github/actions/install-pre-commit/action.yml @@ -24,13 +24,16 @@ inputs: default: "3.9" uv-version: description: 'uv version to use' - default: "0.7.8" # Keep this comment to allow automatic replacement of uv version + default: "0.7.14" # Keep this comment to allow automatic replacement of uv version pre-commit-version: description: 'pre-commit version to use' default: "4.2.0" # Keep this comment to allow automatic replacement of pre-commit version pre-commit-uv-version: description: 'pre-commit-uv version to use' default: "4.1.4" # Keep this comment to allow automatic replacement of pre-commit-uv version + skip-pre-commits: + description: "Skip some pre-commits from installation" + default: "" runs: using: "composite" steps: @@ -40,6 +43,7 @@ runs: UV_VERSION: ${{inputs.uv-version}} PRE_COMMIT_VERSION: ${{inputs.pre-commit-version}} PRE_COMMIT_UV_VERSION: ${{inputs.pre-commit-uv-version}} + SKIP: ${{ inputs.skip-pre-commits }} run: | pip install uv==${UV_VERSION} || true uv tool install pre-commit==${PRE_COMMIT_VERSION} 
--with uv==${UV_VERSION} \ @@ -86,3 +90,5 @@ runs: shell: bash run: pre-commit install-hooks || (cat ~/.cache/pre-commit/pre-commit.log && exit 1) working-directory: ${{ github.workspace }} + env: + SKIP: ${{ inputs.skip-pre-commits }} diff --git a/.github/actions/post_tests_success/action.yml b/.github/actions/post_tests_success/action.yml index 865f1e4857cb2..36ee429477733 100644 --- a/.github/actions/post_tests_success/action.yml +++ b/.github/actions/post_tests_success/action.yml @@ -44,7 +44,7 @@ runs: mkdir ./files/coverage-reports mv ./files/coverage*.xml ./files/coverage-reports/ || true - name: "Upload all coverage reports to codecov" - uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238 # v4 + uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238 env: CODECOV_TOKEN: ${{ inputs.codecov-token }} if: env.ENABLE_COVERAGE == 'true' && env.TEST_TYPES != 'Helm' && inputs.python-version != '3.12' diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index cff760ad9dc38..d857b8bf0eb06 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -72,6 +72,9 @@ labelPRBasedOnFilePath: provider:apache-spark: - providers/apache/spark/** + provider:apache-tinkerpop: + - providers/apache/tinkerpop/** + provider:apprise: - providers/apprise/** @@ -108,9 +111,6 @@ labelPRBasedOnFilePath: provider:common-sql: - providers/common/sql/** - provider:standard: - - providers/standard/** - provider:databricks: - providers/databricks/** @@ -216,7 +216,7 @@ labelPRBasedOnFilePath: provider:opsgenie: - providers/opsgenie/** - provider:Oracle: + provider:oracle: - providers/oracle/** provider:pagerduty: @@ -276,6 +276,9 @@ labelPRBasedOnFilePath: provider:ssh: - providers/ssh/** + provider:standard: + - providers/standard/** + provider:tableau: - providers/tableau/** @@ -326,6 +329,24 @@ labelPRBasedOnFilePath: - .rat-excludes - .readthedocs.yml + # This should be copy of the "area:dev-tools" above and should be updated when 
we switch maintenance branch + backport-to-v3-0-test: + - scripts/**/* + - dev/**/* + - .github/**/* + - Dockerfile.ci + - CONTRIBUTING.rst + - contributing-docs/**/* + - yamllint-config.yml + - .asf.yaml + - .bash_completion + - .dockerignore + - .hadolint.yaml + - .pre-commit-config.yaml + - .rat-excludes + - .readthedocs.yml + + kind:documentation: - airflow-core/docs/**/* - chart/docs/**/* @@ -353,11 +374,16 @@ labelPRBasedOnFilePath: - airflow-core/docs/administration-and-deployment/lineage.rst area:Logging: + - airflow-core/src/airflow/config_templates/airflow_local_settings.py + - airflow-core/tests/unit/core/test_logging_config.py - airflow-core/src/airflow/utils/log/**/* - airflow-core/docs/administration-and-deployment/logging-monitoring/logging-*.rst - airflow-core/tests/unit/utils/log/**/* - providers/**/log/* + area:ConfigTemplates: + - airflow-core/src/airflow/config_templates/* + area:Plugins: - airflow-core/src/airflow/cli/commands/plugins_command.py - airflow-core/src/airflow/plugins_manager.py diff --git a/.github/dependabot.yml b/.github/dependabot.yml index adefbb9f478f7..a0404b811674e 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -33,7 +33,6 @@ updates: - package-ecosystem: npm directories: - - /airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui - /airflow-core/src/airflow/ui schedule: interval: daily @@ -41,6 +40,16 @@ updates: core-ui-package-updates: patterns: - "*" + + - package-ecosystem: npm + directories: + - /airflow-core/src/airflow/api_fastapi/auth/managers/simple/ui + schedule: + interval: daily + groups: + core-ui-package-updates: + patterns: + - "*" - package-ecosystem: npm directories: - /providers/fab/src/airflow/providers/fab/www @@ -51,7 +60,7 @@ updates: patterns: - "*" - # Repeat dependency updates on 2.10 branch as well + # Repeat dependency updates on 2.11 branch as well - package-ecosystem: pip directories: - /clients/python @@ -60,14 +69,14 @@ updates: - / schedule: interval: daily - 
target-branch: v2-10-test + target-branch: v2-11-test - package-ecosystem: npm directories: - /airflow/www/ schedule: interval: daily - target-branch: v2-10-test + target-branch: v2-11-test groups: core-ui-package-updates: patterns: diff --git a/.github/workflows/additional-ci-image-checks.yml b/.github/workflows/additional-ci-image-checks.yml index 9e7edaee0ee85..d78404dfef5b4 100644 --- a/.github/workflows/additional-ci-image-checks.yml +++ b/.github/workflows/additional-ci-image-checks.yml @@ -135,7 +135,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" diff --git a/.github/workflows/additional-prod-image-tests.yml b/.github/workflows/additional-prod-image-tests.yml index a2e9deed6df5c..d8e3180f883c9 100644 --- a/.github/workflows/additional-prod-image-tests.yml +++ b/.github/workflows/additional-prod-image-tests.yml @@ -107,7 +107,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 2 persist-credentials: false @@ -144,7 +144,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 2 persist-credentials: false diff --git a/.github/workflows/airflow-distributions-tests.yml b/.github/workflows/airflow-distributions-tests.yml index a7156ef98dfb7..ffcd9e3183005 100644 --- 
a/.github/workflows/airflow-distributions-tests.yml +++ b/.github/workflows/airflow-distributions-tests.yml @@ -57,6 +57,10 @@ on: # yamllint disable-line rule:truthy description: "Whether this is a canary run (true/false)" required: true type: string + use-local-venv: + description: "Whether local venv should be used for tests (true/false)" + required: true + type: string permissions: contents: read jobs: @@ -80,7 +84,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ matrix.python-version }}" @@ -89,18 +93,34 @@ jobs: platform: ${{ inputs.platform }} python: ${{ matrix.python-version }} use-uv: ${{ inputs.use-uv }} + if: ${{ inputs.use-local-venv != 'true' }} + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh + shell: bash + if: ${{ inputs.use-local-venv == 'true' }} + - name: "Install Breeze" + uses: ./.github/actions/breeze + with: + use-uv: ${{ inputs.use-uv }} + if: ${{ inputs.use-local-venv == 'true' }} - name: "Cleanup dist files" run: rm -fv ./dist/* + if: ${{ matrix.python-version == inputs.default-python-version }} # Conditional steps based on the distribution name - name: "Prepare Airflow ${{inputs.distribution-name}}: wheel" env: DISTRIBUTION_TYPE: "${{ inputs.distribution-cmd-format }}" - run: > + USE_LOCAL_HATCH: "${{ inputs.use-local-venv }}" + run: | + uv tool uninstall hatch || true + uv tool install hatch==1.14.1 breeze release-management "${DISTRIBUTION_TYPE}" --distribution-format wheel + if: ${{ matrix.python-version == inputs.default-python-version }} - name: "Verify wheel packages with twine" run: | uv tool uninstall twine || true uv tool install twine && twine check dist/*.whl + if: ${{ matrix.python-version == 
inputs.default-python-version }} - name: > Run unit tests for Airflow ${{inputs.distribution-name}}:Python ${{ matrix.python-version }} env: diff --git a/.github/workflows/automatic-backport.yml b/.github/workflows/automatic-backport.yml index 4c72401a5d317..4f861ddd58118 100644 --- a/.github/workflows/automatic-backport.yml +++ b/.github/workflows/automatic-backport.yml @@ -37,7 +37,7 @@ jobs: - name: Find PR information id: pr-info - uses: actions/github-script@v7 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.github/workflows/backport-cli.yml b/.github/workflows/backport-cli.yml index 673607027496d..0ecdfb8e63e04 100644 --- a/.github/workflows/backport-cli.yml +++ b/.github/workflows/backport-cli.yml @@ -53,7 +53,7 @@ jobs: steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" id: checkout-for-backport - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: true fetch-depth: 0 diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 52983ad65d559..3183ace6b0095 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -71,7 +71,7 @@ jobs: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: # Need to fetch all history for selective checks tests fetch-depth: 0 @@ -94,7 +94,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -105,7 +105,7 @@ jobs: 
version: 9 run_install: false - name: "Setup node" - uses: actions/setup-node@v4 + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 with: node-version: 21 cache: 'pnpm' @@ -158,12 +158,13 @@ jobs: runs-on: ${{ fromJSON(inputs.runners) }} env: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + SKIP: ${{ inputs.skip-pre-commits }} steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Install Breeze" @@ -176,6 +177,7 @@ jobs: id: pre-commit with: python-version: ${{steps.breeze.outputs.host-python-version}} + skip-pre-commits: ${{ inputs.skip-pre-commits }} # Those checks are run if no image needs to be built for checks. This is for simple changes that # Do not touch any of the python code or any of the important files that might require building @@ -191,7 +193,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -205,9 +207,10 @@ jobs: uses: ./.github/actions/install-pre-commit id: pre-commit with: - python-version: ${{steps.breeze.outputs.host-python-version}} + python-version: ${{ steps.breeze.outputs.host-python-version }} + skip-pre-commits: ${{ inputs.skip-pre-commits }} - name: Fetch incoming commit ${{ github.sha }} with its parent - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.sha }} fetch-depth: 2 @@ -228,7 +231,7 @@ jobs: runs-on: ["windows-latest"] steps: - name: "Checkout 
${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 2 persist-credentials: false @@ -246,7 +249,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -261,6 +264,7 @@ jobs: id: pre-commit with: python-version: ${{steps.breeze.outputs.host-python-version}} + skip-pre-commits: ${{ inputs.skip-pre-commits }} - name: "Autoupdate all pre-commits" run: pre-commit autoupdate - name: "Run automated upgrade for black" @@ -319,7 +323,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -359,4 +363,5 @@ jobs: run: git fetch --tags >/dev/null 2>&1 || true - name: "Test airflow core issue generation automatically" run: | - breeze release-management generate-issue-content-core --limit-pr-count 25 --latest --verbose + breeze release-management generate-issue-content-core \ + --limit-pr-count 2 --previous-release 3.0.1 --current-release 3.0.2 --verbose diff --git a/.github/workflows/ci-amd.yml b/.github/workflows/ci-amd.yml index e67351d64108f..6010123c9944f 100644 --- a/.github/workflows/ci-amd.yml +++ b/.github/workflows/ci-amd.yml @@ -113,6 +113,7 @@ jobs: run-coverage: ${{ steps.source-run-info.outputs.run-coverage }} run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} run-task-sdk-tests: ${{ steps.selective-checks.outputs.run-task-sdk-tests }} + run-go-sdk-tests: 
${{ steps.selective-checks.outputs.run-go-sdk-tests }} run-system-tests: ${{ steps.selective-checks.outputs.run-system-tests }} run-tests: ${{ steps.selective-checks.outputs.run-tests }} run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }} @@ -134,13 +135,13 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" run: ./scripts/ci/prepare_and_cleanup_runner.sh - name: Fetch incoming commit ${{ github.sha }} with its parent - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.sha }} fetch-depth: 2 @@ -168,6 +169,37 @@ jobs: PR_LABELS: ${{ steps.source-run-info.outputs.pr-labels }} GITHUB_CONTEXT: ${{ toJson(github) }} + run-pin-versions-pre-commit: + name: "Run pin-versions pre-commit" + needs: [build-info] + runs-on: ${{ fromJSON(needs.build-info.outputs.amd-runners) }} + steps: + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + - name: "Install Python 3.11 as 3.11+ is needed by pin-versions pre-commit" + uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c # v4.9.1 + with: + python-version: 3.11 + cache: "pip" + - name: Install pre-commit, uv, and pre-commit-uv + shell: bash + env: + UV_VERSION: "0.7.14" # Keep this comment to allow automatic replacement of uv version + PRE_COMMIT_VERSION: "4.2.0" # Keep this comment to allow automatic replacement of pre-commit version + PRE_COMMIT_UV_VERSION: "4.1.4" # Keep this comment to allow automatic replacement of pre-commit-uv version + run: | + pip install uv==${UV_VERSION} || true + uv tool install 
pre-commit==${PRE_COMMIT_VERSION} --with uv==${UV_VERSION} \ + --with pre-commit-uv==${PRE_COMMIT_UV_VERSION} + - name: "Run pin-versions pre-commit" + run: > + pre-commit run -c .github/.pre-commit-config.yaml --all-files --verbose --hook-stage manual + pin-versions + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + basic-tests: name: "Basic tests" needs: [build-info] @@ -751,32 +783,71 @@ jobs: distribution-name: "task-sdk" distribution-cmd-format: "prepare-task-sdk-distributions" test-type: "task-sdk-tests" + use-local-venv: 'false' if: > ( needs.build-info.outputs.run-task-sdk-tests == 'true' || needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true') - tests-airflow-ctl: - name: "Airflow CTL tests" - uses: ./.github/workflows/airflow-distributions-tests.yml - needs: [build-info, build-ci-images] - permissions: - contents: read - packages: read - with: - runners: ${{ needs.build-info.outputs.amd-runners }} - platform: "linux/amd64" - default-python-version: ${{ needs.build-info.outputs.default-python-version }} - python-versions: ${{ needs.build-info.outputs.python-versions }} - use-uv: ${{ needs.build-info.outputs.use-uv }} - canary-run: ${{ needs.build-info.outputs.canary-run }} - distribution-name: "airflow-ctl" - distribution-cmd-format: "prepare-airflow-ctl-distributions" - test-type: "airflow-ctl-tests" - if: > - ( needs.build-info.outputs.run-airflow-ctl-tests == 'true' || - needs.build-info.outputs.run-tests == 'true' && - needs.build-info.outputs.only-new-ui-files != 'true') +# tests-go-sdk: +# name: "Go SDK tests" +# needs: [build-info, build-ci-images] +# runs-on: ${{ fromJSON(needs.build-info.outputs.amd-runners) }} +# timeout-minutes: 15 +# permissions: +# contents: read +# packages: read +# if: > +# ( needs.build-info.outputs.run-go-sdk-tests == 'true' || +# needs.build-info.outputs.run-tests == 'true' && +# needs.build-info.outputs.only-new-ui-files != 'true') +# env: +# GITHUB_REPOSITORY: ${{ 
github.repository }} +# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +# GITHUB_USERNAME: ${{ github.actor }} +# VERBOSE: "true" +# steps: +# - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" +# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 +# with: +# persist-credentials: false +# +# # keep this in sync with go.mod in go-sdk/ +# - name: Setup Go +# uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 +# with: +# go-version: 1.24 +# cache-dependency-path: go-sdk/go.sum +# +# - name: "Cleanup dist files" +# run: rm -fv ./dist/* +# +# - name: Run Go tests +# working-directory: ./go-sdk +# run: go test -v ./... +# +# tests-airflow-ctl: +# name: "Airflow CTL tests" +# uses: ./.github/workflows/airflow-distributions-tests.yml +# needs: [build-info] +# permissions: +# contents: read +# packages: read +# with: +# runners: ${{ needs.build-info.outputs.amd-runners }} +# platform: "linux/amd64" +# default-python-version: ${{ needs.build-info.outputs.default-python-version }} +# python-versions: ${{ needs.build-info.outputs.python-versions }} +# use-uv: ${{ needs.build-info.outputs.use-uv }} +# canary-run: ${{ needs.build-info.outputs.canary-run }} +# distribution-name: "airflow-ctl" +# distribution-cmd-format: "prepare-airflow-ctl-distributions" +# test-type: "airflow-ctl-tests" +# use-local-venv: 'true' +# if: > +# ( needs.build-info.outputs.run-airflow-ctl-tests == 'true' || +# needs.build-info.outputs.run-tests == 'true' && +# needs.build-info.outputs.only-new-ui-files != 'true') finalize-tests: name: Finalize tests @@ -829,6 +900,8 @@ jobs: notify-slack-failure: name: "Notify Slack on Failure" + needs: + - finalize-tests if: github.event_name == 'schedule' && failure() && github.run_attempt == 1 runs-on: ["ubuntu-22.04"] steps: @@ -846,7 +919,7 @@ jobs: - type: "section" text: type: "mrkdwn" - text: "🚨🕒 Scheduled CI Failure Alert (AMD) on branch *${{ github.ref_name }}* 🕒🚨\n\n*Details:* " + text: "🚨🕒 Scheduled CI 
Failure Alert (AMD) 🕒🚨\n\n*Details:* " # yamllint enable rule:line-length summarize-warnings: @@ -859,7 +932,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -868,12 +941,12 @@ jobs: shell: bash run: ./scripts/tools/free_up_disk_space.sh - name: "Download all test warning artifacts from the current build" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: path: ./artifacts pattern: test-warnings-* - name: "Setup python" - uses: actions/setup-python@v5 + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 with: python-version: ${{ inputs.default-python-version }} - name: "Summarize all warnings" @@ -882,7 +955,7 @@ jobs: --pattern "**/warnings-*.txt" \ --output ./files - name: "Upload artifact for summarized warnings" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: test-summarized-amd-runner-warnings path: ./files/warn-summary-*.txt diff --git a/.github/workflows/ci-arm.yml b/.github/workflows/ci-arm.yml index c1b39b7d3dfc0..bf54881cd1592 100644 --- a/.github/workflows/ci-arm.yml +++ b/.github/workflows/ci-arm.yml @@ -106,6 +106,7 @@ jobs: run-coverage: ${{ steps.source-run-info.outputs.run-coverage }} run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }} run-task-sdk-tests: ${{ steps.selective-checks.outputs.run-task-sdk-tests }} + run-go-sdk-tests: ${{ steps.selective-checks.outputs.run-go-sdk-tests }} run-system-tests: ${{ steps.selective-checks.outputs.run-system-tests }} run-tests: ${{ steps.selective-checks.outputs.run-tests }} run-ui-tests: ${{ 
steps.selective-checks.outputs.run-ui-tests }} @@ -127,13 +128,13 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" run: ./scripts/ci/prepare_and_cleanup_runner.sh - name: Fetch incoming commit ${{ github.sha }} with its parent - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.sha }} fetch-depth: 2 @@ -507,6 +508,43 @@ jobs: ( needs.build-info.outputs.run-kubernetes-tests == 'true' || needs.build-info.outputs.needs-helm-tests == 'true') + tests-go-sdk: + name: "Go SDK tests" + needs: [build-info, build-ci-images] + runs-on: ${{ fromJSON(needs.build-info.outputs.arm-runners) }} + timeout-minutes: 15 + permissions: + contents: read + packages: read + if: > + ( needs.build-info.outputs.run-go-sdk-tests == 'true' || + needs.build-info.outputs.run-tests == 'true' && + needs.build-info.outputs.only-new-ui-files != 'true') + env: + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + VERBOSE: "true" + steps: + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + + # keep this in sync with go.mod in go-sdk/ + - name: Setup Go + uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 + with: + go-version: 1.24 + cache-dependency-path: go-sdk/go.sum + + - name: "Cleanup dist files" + run: rm -fv ./dist/* + + - name: Run Go tests + working-directory: ./go-sdk + run: go test -v ./... 
+ finalize-tests: name: Finalize tests permissions: @@ -549,6 +587,8 @@ jobs: notify-slack-failure: name: "Notify Slack on Failure" + needs: + - finalize-tests if: github.event_name == 'schedule' && failure() && github.run_attempt == 1 runs-on: ["ubuntu-22.04"] steps: @@ -566,5 +606,5 @@ jobs: - type: "section" text: type: "mrkdwn" - text: "🚨🕒 Scheduled CI Failure Alert (AMD) on branch *${{ github.ref_name }}* 🕒🚨\n\n*Details:* " + text: "🚨🕒 Scheduled CI Failure Alert (ARM) 🕒🚨\n\n*Details:* " # yamllint enable rule:line-length diff --git a/.github/workflows/ci-image-build.yml b/.github/workflows/ci-image-build.yml index b52fdb906fa2e..f58fe1946ca1f 100644 --- a/.github/workflows/ci-image-build.yml +++ b/.github/workflows/ci-image-build.yml @@ -117,7 +117,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout target branch" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" diff --git a/.github/workflows/ci-image-checks.yml b/.github/workflows/ci-image-checks.yml index a8535acfccbd5..ec47528297ec3 100644 --- a/.github/workflows/ci-image-checks.yml +++ b/.github/workflows/ci-image-checks.yml @@ -127,7 +127,7 @@ jobs: run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" if: inputs.canary-run == 'true' - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false if: inputs.canary-run == 'true' @@ -173,7 +173,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false 
- name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" @@ -216,7 +216,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" @@ -264,7 +264,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" @@ -277,26 +277,25 @@ jobs: uses: apache/infrastructure-actions/stash/restore@1c35b5ccf8fba5d4c3fdf25a045ca91aa0cbc468 with: path: ./generated/_inventory_cache/ - # TODO(potiuk): do better with determining the key - key: cache-docs-inventory-v1-${{ hashFiles('pyproject.toml') }} + key: cache-docs-inventory-v1-${{ hashFiles('**/pyproject.toml') }} id: restore-docs-inventory-cache - name: "Building docs with ${{ matrix.flag }} flag" env: DOCS_LIST_AS_STRING: ${{ inputs.docs-list-as-string }} run: > - breeze build-docs ${DOCS_LIST_AS_STRING} ${{ matrix.flag }} + breeze build-docs ${DOCS_LIST_AS_STRING} ${{ matrix.flag }} --refresh-airflow-inventories - name: "Save docs inventory cache" uses: apache/infrastructure-actions/stash/save@1c35b5ccf8fba5d4c3fdf25a045ca91aa0cbc468 with: path: ./generated/_inventory_cache/ - key: cache-docs-inventory-v1-${{ hashFiles('pyproject.toml') }} + key: cache-docs-inventory-v1-${{ hashFiles('**/pyproject.toml') }} if-no-files-found: 'error' retention-days: '2' # If we upload from multiple matrix jobs we could end up with a race condition. 
so just pick one job # to be responsible for updating it. https://github.com/actions/upload-artifact/issues/506 if: steps.restore-docs-inventory-cache != 'true' && matrix.flag == '--docs-only' - name: "Upload build docs" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: airflow-docs path: './generated/_build' @@ -328,7 +327,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" @@ -338,7 +337,7 @@ jobs: python: ${{ inputs.default-python-version }} use-uv: ${{ inputs.use-uv }} - name: "Download docs prepared as artifacts" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: airflow-docs path: './generated/_build' @@ -403,12 +402,12 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 2 persist-credentials: false - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: repository: "apache/airflow-client-python" fetch-depth: 1 diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 1fcf81a84fd5b..28c8cfae81a07 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -47,17 +47,17 @@ jobs: security-events: write steps: - name: Checkout repository - uses: 
actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: Initialize CodeQL - uses: github/codeql-action/init@v3 + uses: github/codeql-action/init@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0 with: languages: ${{ matrix.language }} - name: Autobuild - uses: github/codeql-action/autobuild@v3 + uses: github/codeql-action/autobuild@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0 diff --git a/.github/workflows/finalize-tests.yml b/.github/workflows/finalize-tests.yml index 6afcbd0812219..fb4b46d9d6256 100644 --- a/.github/workflows/finalize-tests.yml +++ b/.github/workflows/finalize-tests.yml @@ -99,7 +99,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: # Needed to perform push action persist-credentials: false @@ -109,14 +109,14 @@ jobs: id: constraints-branch run: ./scripts/ci/constraints/ci_branch_constraints.sh >> ${GITHUB_OUTPUT} - name: Checkout ${{ steps.constraints-branch.outputs.branch }} - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: path: "constraints" ref: ${{ steps.constraints-branch.outputs.branch }} persist-credentials: true fetch-depth: 0 - name: "Download constraints from the constraints generated by build CI image" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: constraints path: ./files diff --git a/.github/workflows/generate-constraints.yml b/.github/workflows/generate-constraints.yml index 35b31db98e575..e6548f3240ed8 100644 --- 
a/.github/workflows/generate-constraints.yml +++ b/.github/workflows/generate-constraints.yml @@ -64,7 +64,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -130,7 +130,7 @@ jobs: df -H done - name: "Upload constraint artifacts" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: constraints path: ./files/constraints-*/constraints-*.txt diff --git a/.github/workflows/helm-tests.yml b/.github/workflows/helm-tests.yml index 26bf28cce1beb..e889e25160cea 100644 --- a/.github/workflows/helm-tests.yml +++ b/.github/workflows/helm-tests.yml @@ -68,7 +68,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" @@ -93,7 +93,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -130,13 +130,12 @@ jobs: SIGN_WITH: dev@airflow.apache.org - name: "Fetch Git Tags" run: git fetch --tags - - name: "Test helm chart issue generation automatically" - # Adding same tags for now, will address in a follow-up + - name: "Test helm chart issue generation" run: > - breeze release-management 
generate-issue-content-helm-chart --limit-pr-count 10 - --latest --verbose + breeze release-management generate-issue-content-helm-chart --limit-pr-count 2 + --previous-release helm-chart/1.15.0 --current-release helm-chart/1.16.0 --verbose - name: "Upload Helm artifacts" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: Helm artifacts path: ./dist/airflow-* diff --git a/.github/workflows/integration-system-tests.yml b/.github/workflows/integration-system-tests.yml index bf75cc87f31a3..6619bedd7c65e 100644 --- a/.github/workflows/integration-system-tests.yml +++ b/.github/workflows/integration-system-tests.yml @@ -97,7 +97,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" @@ -146,7 +146,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" @@ -190,7 +190,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index 6ed0c79d187c5..37aa3aa703ce1 
100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -80,7 +80,7 @@ jobs: echo "PYTHON_MAJOR_MINOR_VERSION=${KUBERNETES_COMBO}" | sed 's/-.*//' >> $GITHUB_ENV echo "KUBERNETES_VERSION=${KUBERNETES_COMBO}" | sed 's/=[^-]*-/=/' >> $GITHUB_ENV - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false # env.PYTHON_MAJOR_MINOR_VERSION, env.KUBERNETES_VERSION are set in the previous @@ -115,7 +115,7 @@ jobs: - name: "\ Upload KinD logs ${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-\ ${{ matrix.use-standard-naming }}" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: "\ kind-logs-${{ matrix.kubernetes-combo }}-${{ matrix.executor }}-\ diff --git a/.github/workflows/news-fragment.yml b/.github/workflows/news-fragment.yml index f6f68d1288a35..04e308c306138 100644 --- a/.github/workflows/news-fragment.yml +++ b/.github/workflows/news-fragment.yml @@ -30,7 +30,7 @@ jobs: if: "contains(github.event.pull_request.labels.*.name, 'airflow3.0:breaking')" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false # `towncrier check` runs `git diff --name-only origin/main...`, which diff --git a/.github/workflows/prod-image-build.yml b/.github/workflows/prod-image-build.yml index c5cf5cd5cef96..2ba47bf8c4990 100644 --- a/.github/workflows/prod-image-build.yml +++ b/.github/workflows/prod-image-build.yml @@ -124,7 +124,7 @@ jobs: run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" if: inputs.upload-package-artifact == 'true' - name: "Checkout target branch" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare 
and cleanup runner" @@ -171,7 +171,7 @@ jobs: breeze release-management prepare-airflow-ctl-distributions --distribution-format wheel if: inputs.upload-package-artifact == 'true' - name: "Upload prepared packages as artifacts" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: prod-packages path: ./dist @@ -211,7 +211,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout target branch" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -224,14 +224,14 @@ jobs: shell: bash run: rm -fv ./dist/* ./docker-context-files/* - name: "Download packages prepared as artifacts" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: prod-packages path: ./docker-context-files - name: "Show downloaded packages" run: ls -la ./docker-context-files - name: "Download constraints" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: constraints path: ./docker-context-files diff --git a/.github/workflows/publish-docs-to-s3.yml b/.github/workflows/publish-docs-to-s3.yml index 748a466cf8ce6..6139d33182e50 100644 --- a/.github/workflows/publish-docs-to-s3.yml +++ b/.github/workflows/publish-docs-to-s3.yml @@ -24,31 +24,30 @@ on: # yamllint disable-line rule:truthy description: "The branch or tag to checkout for the docs publishing" required: true type: string - include-docs: - description: | - Space separated list of docs to build. 
- To publish docs for nested provider packages, provide the package name with `.` - eg: amazon common.messaging apache.kafka - + destination: + description: "The destination location in S3" required: false - default: "all-providers" + default: auto + type: choice + options: + - auto + - live + - staging + include-docs: + description: "Space separated list of packages to build" + required: true type: string exclude-docs: description: "Comma separated list of docs to exclude" required: false default: "no-docs-excluded" type: string - destination-location: - description: "The destination location in S3, default is live site" + skip-write-to-stable-folder: + description: "Do not override stable version" required: false - default: "s3://live-docs-airflow-apache-org/docs" - type: choice - options: - - s3://live-docs-airflow-apache-org/docs - - s3://staging-docs-airflow-apache-org/docs + default: false + type: boolean -env: - AIRFLOW_ROOT_PATH: "/home/runner/work/temp-airflow-repo-reference" # checkout dir for referenced tag permissions: contents: read jobs: @@ -62,9 +61,15 @@ jobs: REF: ${{ inputs.ref }} INCLUDE_DOCS: ${{ inputs.include-docs }} EXCLUDE_DOCS: ${{ inputs.exclude-docs }} - DESTINATION_LOCATION: ${{ inputs.destination-location }} + DESTINATION: ${{ inputs.destination }} + SKIP_WRITE_TO_STABLE_FOLDER: ${{ inputs.skip-write-to-stable-folder }} outputs: include-docs: ${{ inputs.include-docs == 'all' && '' || inputs.include-docs }} + destination-location: ${{ steps.parameters.outputs.destination-location }} + destination: ${{ steps.parameters.outputs.destination }} + extra-build-options: ${{ steps.parameters.outputs.extra-build-options }} + # yamllint disable rule:line-length + skip-write-to-stable-folder: ${{ inputs.skip-write-to-stable-folder && '--skip-write-to-stable-folder' || '' }} if: contains(fromJSON('[ "ashb", "eladkal", @@ -78,39 +83,33 @@ jobs: steps: - name: "Input parameters summary" shell: bash + id: parameters run: | echo "Input parameters 
summary" echo "=========================" echo "Ref: '${REF}'" echo "Included docs : '${INCLUDE_DOCS}'" echo "Exclude docs: '${EXCLUDE_DOCS}'" - echo "Destination location: '${DESTINATION_LOCATION}'" - - build-ci-images: - name: Build CI images - uses: ./.github/workflows/ci-image-build.yml - needs: [build-info] - permissions: - contents: read - # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs - # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. - packages: write - with: - runners: '["ubuntu-22.04"]' - platform: "linux/amd64" - push-image: "false" - upload-image-artifact: "true" - upload-mount-cache-artifact: false - python-versions: "['3.9']" - branch: ${{ inputs.ref }} - use-uv: true - upgrade-to-newer-dependencies: false - constraints-branch: "constraints-main" - docker-cache: registry - disable-airflow-repo-cache: false + echo "Destination: '${DESTINATION}'" + echo "Skip write to stable folder: '${SKIP_WRITE_TO_STABLE_FOLDER}'" + if [[ "${DESTINATION}" == "auto" ]]; then + if [[ "${REF}" =~ ^.*[0-9]*\.[0-9]*\.[0-9]*$ ]]; then + echo "${REF} looks like final release, using live destination" + DESTINATION="live" + else + echo "${REF} does not looks like final release, using staging destination" + DESTINATION="staging" + fi + fi + echo "destination=${DESTINATION}" >> ${GITHUB_OUTPUT} + if [[ "${DESTINATION}" == "live" ]]; then + echo "destination-location=s3://live-docs-airflow-apache-org/docs/" >> ${GITHUB_OUTPUT} + else + echo "destination-location=s3://staging-docs-airflow-apache-org/docs/" >> ${GITHUB_OUTPUT} + fi build-docs: - needs: [build-ci-images, build-info] + needs: [build-info] timeout-minutes: 150 name: "Build documentation" runs-on: ubuntu-latest @@ -118,40 +117,60 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - INCLUDE_NOT_READY_PROVIDERS: "true" 
INCLUDE_SUCCESS_OUTPUTS: false PYTHON_MAJOR_MINOR_VERSION: 3.9 VERBOSE: "true" + EXTRA_BUILD_OPTIONS: ${{ needs.build-info.outputs.extra-build-options }} steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} " - uses: actions/checkout@v4 + # Check out the repo first to run cleanup - in sub-folder + - name: "Checkout current version first to clean-up stuff" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Checkout from ${{ inputs.ref }} to build docs" - run: | - git clone https://github.com/apache/airflow.git "${AIRFLOW_ROOT_PATH}" - cd "${AIRFLOW_ROOT_PATH}" && git checkout ${REF} - env: - REF: ${{ inputs.ref }} - - name: "Prepare breeze & CI image: 3.9" - uses: ./.github/actions/prepare_breeze_and_image + path: current-version + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh + working-directory: current-version + # We are checking repo for both - breeze and docs from the ref provided as input + # This will take longer as we need to rebuild CI image and it will not use cache + # but it will build the CI image from the version of Airflow that is used to check out things + - name: "Checkout ${{ inputs.ref }} " + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + ref: ${{ inputs.ref }} + fetch-depth: 0 + fetch-tags: true + - name: "Install Breeze" + uses: ./.github/actions/breeze with: - platform: "linux/amd64" - python: 3.9 - use-uv: true + use-uv: ${{ inputs.use-uv }} - name: "Building docs with --docs-only flag" + env: + INCLUDE_DOCS: ${{ needs.build-info.outputs.include-docs }} + INCLUDE_COMMITS: "true" + run: > + breeze build-docs ${INCLUDE_DOCS} --docs-only + - name: Check disk space available + run: df -H + # Here we will create temp airflow-site dir to publish docs + - name: 
Create /mnt/airflow-site directory + run: | + sudo mkdir -p /mnt/airflow-site && sudo chown -R "${USER}" /mnt/airflow-site + echo "AIRFLOW_SITE_DIRECTORY=/mnt/airflow-site/" >> "$GITHUB_ENV" + - name: "Publish docs to /mnt/airflow-site directory" env: INCLUDE_DOCS: ${{ needs.build-info.outputs.include-docs }} run: > - breeze build-docs ${INCLUDE_DOCS} --docs-only --include-commits + breeze release-management publish-docs --override-versioned --run-in-parallel ${INCLUDE_DOCS} - name: "Upload build docs" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: airflow-docs - path: ${{ env.AIRFLOW_ROOT_PATH }}/generated/_build + path: /mnt/airflow-site retention-days: '7' if-no-files-found: 'error' overwrite: 'true' @@ -167,7 +186,6 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - INCLUDE_NOT_READY_PROVIDERS: "true" INCLUDE_SUCCESS_OUTPUTS: false PYTHON_MAJOR_MINOR_VERSION: 3.9 VERBOSE: "true" @@ -175,51 +193,39 @@ jobs: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} " - uses: actions/checkout@v4 + # We are checking repo for both - breeze and docs from the "workflow' branch + # This will take longer as we need to rebuild CI image and it will not use cache + # but it will build the CI image from the version of Airflow that is used to check out things + - name: "Checkout ${{ inputs.ref }} " + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - - name: "Prepare breeze & CI image: 3.9" - uses: ./.github/actions/prepare_breeze_and_image + - name: "Prepare and cleanup runner" + run: ./scripts/ci/prepare_and_cleanup_runner.sh + - name: "Install Breeze" + uses: ./.github/actions/breeze with: - platform: "linux/amd64" - python: 3.9 - use-uv: true - - 
name: "Checkout ${{ inputs.ref }}" - run: | - git clone https://github.com/apache/airflow.git "${AIRFLOW_ROOT_PATH}" - cd "${AIRFLOW_ROOT_PATH}" && git checkout ${REF} - env: - REF: ${{ inputs.ref }} + use-uv: ${{ inputs.use-uv }} - name: "Download docs prepared as artifacts" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: airflow-docs - path: ${{ env.AIRFLOW_ROOT_PATH }}/generated/_build + path: /mnt/airflow-site - name: Check disk space available run: df -H - # Here we will create temp airflow-site dir to publish - # docs and for back-references - - name: Create /mnt/airflow-site directory - run: | - sudo mkdir -p /mnt/airflow-site && sudo chown -R "${USER}" /mnt/airflow-site - echo "AIRFLOW_SITE_DIRECTORY=/mnt/airflow-site/" >> "$GITHUB_ENV" - - name: "Publish docs to tmp directory" + - name: "Update watermarks" env: - INCLUDE_DOCS: ${{ needs.build-info.outputs.include-docs }} - run: > - breeze release-management publish-docs --override-versioned --run-in-parallel - ${INCLUDE_DOCS} - - name: Check disk space available - run: df -H - - name: "Generate back references for providers" - run: breeze release-management add-back-references all-providers - - name: "Generate back references for apache-airflow" - run: breeze release-management add-back-references apache-airflow - - name: "Generate back references for docker-stack" - run: breeze release-management add-back-references docker-stack - - name: "Generate back references for helm-chart" - run: breeze release-management add-back-references helm-chart + SOURCE_DIR_PATH: "/mnt/airflow-site/docs-archive/" + # yamllint disable rule:line-length + run: | + curl -sSf -o add_watermark.py https://raw.githubusercontent.com/apache/airflow-site/refs/heads/main/.github/scripts/add_watermark.py \ + --header "Authorization: Bearer ${{ github.token }} " --header "X-GitHub-Api-Version: 2022-11-28" + chmod a+x add_watermark.py + mkdir -p images + 
curl -sSf -o images/staging.png https://raw.githubusercontent.com/apache/airflow-site/refs/heads/main/.github/scripts/images/staging.png + uv run add_watermark.py --pattern 'main.min*css' --folder ${SOURCE_DIR_PATH} \ + --image-directory images --url-prefix /images + if: needs.build-info.outputs.destination == 'staging' - name: Install AWS CLI v2 run: | curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o /tmp/awscliv2.zip @@ -235,10 +241,11 @@ jobs: aws-region: us-east-2 - name: "Syncing docs to S3" env: - DESTINATION_LOCATION: "${{ inputs.destination-location }}" + DESTINATION_LOCATION: "${{ needs.build-info.outputs.destination-location }}" SOURCE_DIR_PATH: "/mnt/airflow-site/docs-archive/" EXCLUDE_DOCS: "${{ inputs.exclude-docs }}" + SKIP_WRITE_TO_STABLE_FOLDER: "${{ needs.build-info.outputs.skip-write-to-stable-folder }}" run: | breeze release-management publish-docs-to-s3 --source-dir-path ${SOURCE_DIR_PATH} \ --destination-location ${DESTINATION_LOCATION} --stable-versions \ - --exclude-docs ${EXCLUDE_DOCS} --overwrite + --exclude-docs ${EXCLUDE_DOCS} --overwrite ${SKIP_WRITE_TO_STABLE_FOLDER} diff --git a/.github/workflows/push-image-cache.yml b/.github/workflows/push-image-cache.yml index f2258c13b77a9..b523577b46c99 100644 --- a/.github/workflows/push-image-cache.yml +++ b/.github/workflows/push-image-cache.yml @@ -113,7 +113,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -184,7 +184,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # 
v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -196,7 +196,7 @@ jobs: - name: "Cleanup dist and context file" run: rm -fv ./dist/* ./docker-context-files/* - name: "Download packages prepared as artifacts" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: prod-packages path: ./docker-context-files diff --git a/.github/workflows/recheck-old-bug-report.yml b/.github/workflows/recheck-old-bug-report.yml index 217092b86f87e..c245f73923216 100644 --- a/.github/workflows/recheck-old-bug-report.yml +++ b/.github/workflows/recheck-old-bug-report.yml @@ -28,7 +28,7 @@ jobs: recheck-old-bug-report: runs-on: ["ubuntu-22.04"] steps: - - uses: actions/stale@v9 + - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0 with: only-issue-labels: 'kind:bug' stale-issue-label: 'Stale Bug Report' diff --git a/.github/workflows/release_dockerhub_image.yml b/.github/workflows/release_dockerhub_image.yml index fe1ce300fbf4f..2499e521d74a3 100644 --- a/.github/workflows/release_dockerhub_image.yml +++ b/.github/workflows/release_dockerhub_image.yml @@ -83,7 +83,7 @@ jobs: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" diff --git a/.github/workflows/release_single_dockerhub_image.yml b/.github/workflows/release_single_dockerhub_image.yml index 55a8c2cc00429..fd572adbabab1 100644 --- a/.github/workflows/release_single_dockerhub_image.yml +++ b/.github/workflows/release_single_dockerhub_image.yml @@ -76,7 +76,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: 
actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -148,7 +148,7 @@ jobs: shell: bash run: find ./dist -name '*.json' - name: "Upload metadata artifact ${{ env.ARTIFACT_NAME }}" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: ${{ env.ARTIFACT_NAME }} path: ./dist/metadata-* @@ -174,7 +174,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare and cleanup runner" @@ -197,7 +197,7 @@ jobs: ACTOR: ${{ github.actor }} run: echo "${GITHUB_TOKEN}" | docker login ghcr.io -u ${ACTOR} --password-stdin - name: "Download metadata artifacts" - uses: actions/download-artifact@v4 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: path: ./dist pattern: metadata-${{ inputs.pythonVersion }}-* diff --git a/.github/workflows/run-unit-tests.yml b/.github/workflows/run-unit-tests.yml index 4b99ac6031137..035248113dac1 100644 --- a/.github/workflows/run-unit-tests.yml +++ b/.github/workflows/run-unit-tests.yml @@ -167,7 +167,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ matrix.python-version }}" diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 2e03e9f33b120..5724a17314aec 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -29,7 +29,7 @@ 
jobs: stale: runs-on: ["ubuntu-22.04"] steps: - - uses: actions/stale@v9 + - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0 with: stale-pr-message: > This pull request has been automatically marked as stale because it has not had diff --git a/.github/workflows/test-providers.yml b/.github/workflows/test-providers.yml index 411b18daaa7b4..525e8e8599a9d 100644 --- a/.github/workflows/test-providers.yml +++ b/.github/workflows/test-providers.yml @@ -89,7 +89,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" @@ -138,7 +138,8 @@ jobs: - name: "Install and verify wheel provider distributions" env: DISTRIBUTION_FORMAT: ${{ matrix.package-format }} - AIRFLOW_SKIP_CONSTRAINTS: "${{ inputs.upgrade-to-newer-dependencies }}" + # yamllint disable rule:line-length + INSTALL_AIRFLOW_WITH_CONSTRAINTS: "${{ inputs.upgrade-to-newer-dependencies == 'true' && 'false' || 'true' }}" run: > breeze release-management verify-provider-distributions --use-distributions-from-dist @@ -187,7 +188,7 @@ jobs: shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Prepare breeze & CI image: ${{ matrix.compat.python-version }}" diff --git a/Dockerfile b/Dockerfile index 5ee3b0c829d94..fbd99bc867906 100644 --- a/Dockerfile +++ b/Dockerfile @@ -56,8 +56,8 @@ ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" # Also use `force pip` label on your PR to swap all places we use `uv` to `pip` ARG 
AIRFLOW_PIP_VERSION=25.1.1 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" -ARG AIRFLOW_SETUPTOOLS_VERSION=80.8.0 -ARG AIRFLOW_UV_VERSION=0.7.8 +ARG AIRFLOW_SETUPTOOLS_VERSION=80.9.0 +ARG AIRFLOW_UV_VERSION=0.7.14 ARG AIRFLOW_USE_UV="false" ARG UV_HTTP_TIMEOUT="300" ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" @@ -232,6 +232,24 @@ readonly MARIADB_LTS_VERSION="10.11" : "${INSTALL_MYSQL_CLIENT:?Should be true or false}" : "${INSTALL_MYSQL_CLIENT_TYPE:-mariadb}" +retry() { + local retries=3 + local count=0 + # adding delay of 10 seconds + local delay=10 + until "$@"; do + exit_code=$? + count=$((count + 1)) + if [[ $count -lt $retries ]]; then + echo "Command failed. Attempt $count/$retries. Retrying in ${delay}s..." + sleep $delay + else + echo "Command failed after $retries attempts." + return $exit_code + fi + done +} + install_mysql_client() { if [[ "${1}" == "dev" ]]; then packages=("libmysqlclient-dev" "mysql-client") @@ -257,8 +275,8 @@ install_mysql_client() { echo "deb http://repo.mysql.com/apt/debian/ $(lsb_release -cs) mysql-${MYSQL_LTS_VERSION}" > \ /etc/apt/sources.list.d/mysql.list - apt-get update - apt-get install --no-install-recommends -y "${packages[@]}" + retry apt-get update + retry apt-get install --no-install-recommends -y "${packages[@]}" apt-get autoremove -yqq --purge apt-get clean && rm -rf /var/lib/apt/lists/* @@ -302,8 +320,8 @@ install_mariadb_client() { /etc/apt/sources.list.d/mariadb.list # Make sure that dependencies from MariaDB repo are preferred over Debian dependencies printf "Package: *\nPin: release o=MariaDB\nPin-Priority: 999\n" > /etc/apt/preferences.d/mariadb - apt-get update - apt-get install --no-install-recommends -y "${packages[@]}" + retry apt-get update + retry apt-get install --no-install-recommends -y "${packages[@]}" apt-get autoremove -yqq --purge apt-get clean && rm -rf /var/lib/apt/lists/* } @@ -655,7 +673,7 @@ if [[ $(id -u) == "0" ]]; then echo echo "${COLOR_RED}You are 
running pip as root. Please use 'airflow' user to run pip!${COLOR_RESET}" echo - echo "${COLOR_YELLOW}See: https://airflow.apache.org/docs/docker-stack/build.html#adding-a-new-pypi-package${COLOR_RESET}" + echo "${COLOR_YELLOW}See: https://airflow.apache.org/docs/docker-stack/build.html#adding-new-pypi-packages-individually${COLOR_RESET}" echo exit 1 fi @@ -1295,7 +1313,7 @@ function check_uid_gid() { >&2 echo " This is to make sure you can run the image with an arbitrary UID in the future." >&2 echo >&2 echo " See more about it in the Airflow's docker image documentation" - >&2 echo " http://airflow.apache.org/docs/docker-stack/entrypoint" + >&2 echo " https://airflow.apache.org/docs/docker-stack/entrypoint.html" >&2 echo # We still allow the image to run with `airflow` user. return @@ -1309,7 +1327,7 @@ function check_uid_gid() { >&2 echo " This is to make sure you can run the image with an arbitrary UID." >&2 echo >&2 echo " See more about it in the Airflow's docker image documentation" - >&2 echo " http://airflow.apache.org/docs/docker-stack/entrypoint" + >&2 echo " https://airflow.apache.org/docs/docker-stack/entrypoint.html" # This will not work so we fail hard exit 1 fi diff --git a/Dockerfile.ci b/Dockerfile.ci index 3637654a54fce..beebd98f788ae 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -171,6 +171,24 @@ readonly MARIADB_LTS_VERSION="10.11" : "${INSTALL_MYSQL_CLIENT:?Should be true or false}" : "${INSTALL_MYSQL_CLIENT_TYPE:-mariadb}" +retry() { + local retries=3 + local count=0 + # adding delay of 10 seconds + local delay=10 + until "$@"; do + exit_code=$? + count=$((count + 1)) + if [[ $count -lt $retries ]]; then + echo "Command failed. Attempt $count/$retries. Retrying in ${delay}s..." + sleep $delay + else + echo "Command failed after $retries attempts." 
+ return $exit_code + fi + done +} + install_mysql_client() { if [[ "${1}" == "dev" ]]; then packages=("libmysqlclient-dev" "mysql-client") @@ -196,8 +214,8 @@ install_mysql_client() { echo "deb http://repo.mysql.com/apt/debian/ $(lsb_release -cs) mysql-${MYSQL_LTS_VERSION}" > \ /etc/apt/sources.list.d/mysql.list - apt-get update - apt-get install --no-install-recommends -y "${packages[@]}" + retry apt-get update + retry apt-get install --no-install-recommends -y "${packages[@]}" apt-get autoremove -yqq --purge apt-get clean && rm -rf /var/lib/apt/lists/* @@ -241,8 +259,8 @@ install_mariadb_client() { /etc/apt/sources.list.d/mariadb.list # Make sure that dependencies from MariaDB repo are preferred over Debian dependencies printf "Package: *\nPin: release o=MariaDB\nPin-Priority: 999\n" > /etc/apt/preferences.d/mariadb - apt-get update - apt-get install --no-install-recommends -y "${packages[@]}" + retry apt-get update + retry apt-get install --no-install-recommends -y "${packages[@]}" apt-get autoremove -yqq --purge apt-get clean && rm -rf /var/lib/apt/lists/* } @@ -813,7 +831,6 @@ if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then set -x fi - . 
"${AIRFLOW_SOURCES:-/opt/airflow}"/scripts/in_container/_in_container_script_init.sh LD_PRELOAD="/usr/lib/$(uname -m)-linux-gnu/libstdc++.so.6" @@ -996,7 +1013,7 @@ function determine_airflow_to_use() { echo "${COLOR_BLUE}Uninstalling all packages first${COLOR_RESET}" echo # shellcheck disable=SC2086 - ${PACKAGING_TOOL_CMD} freeze | grep -ve "^-e" | grep -ve "^#" | grep -ve "^uv" | \ + ${PACKAGING_TOOL_CMD} freeze | grep -ve "^-e" | grep -ve "^#" | grep -ve "^uv" | grep -v "@" | \ xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} # Now install rich ad click first to use the installation script # shellcheck disable=SC2086 @@ -1008,7 +1025,9 @@ function determine_airflow_to_use() { echo # Use uv run to install necessary dependencies automatically # in the future we will be able to use uv sync when `uv.lock` is supported - uv run /opt/airflow/scripts/in_container/install_development_dependencies.py \ + # for the use in parallel runs in docker containers--no-cache is needed - otherwise there is + # possibility of overriding temporary environments by multiple parallel processes + uv run --no-cache /opt/airflow/scripts/in_container/install_development_dependencies.py \ --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt # Some packages might leave legacy typing module which causes test issues # shellcheck disable=SC2086 @@ -1040,7 +1059,7 @@ function check_boto_upgrade() { # shellcheck disable=SC2086 ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} aiobotocore s3fs || true # shellcheck disable=SC2086 - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade boto3 botocore + ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade "boto3<1.38.3" "botocore<1.38.3" set +x } @@ -1119,23 +1138,32 @@ function check_airflow_python_client_installation() { python "${IN_CONTAINER_DIR}/install_airflow_python_client.py" } +function initialize_db() { + # If we are going to start 
the api server OR we are a system test (which may or may not start the api server, + # depending on the Airflow version being used to run the tests), then migrate the DB. + if [[ ${START_API_SERVER_WITH_EXAMPLES=} == "true" || ${TEST_GROUP:=""} == "system" ]]; then + echo + echo "${COLOR_BLUE}Initializing database${COLOR_RESET}" + echo + airflow db migrate + echo + echo "${COLOR_BLUE}Database initialized${COLOR_RESET}" + fi +} + function start_api_server_with_examples(){ - # check if we should not start the api server with examples by checking if both - # START_API_SERVER_WITH_EXAMPLES is false AND the TEST_GROUP env var is not equal to "system" + USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION:=""}" + # Do not start the api server if either START_API_SERVER_WITH_EXAMPLES is false or the TEST_GROUP env var is not + # equal to "system". if [[ ${START_API_SERVER_WITH_EXAMPLES=} != "true" && ${TEST_GROUP:=""} != "system" ]]; then return fi + # If the use Airflow version is set and it is <= 3.0.0 (which does not have the API server anyway) also return + if [[ ${USE_AIRFLOW_VERSION} != "" && ${USE_AIRFLOW_VERSION} < "3.0.0" ]]; then + return + fi export AIRFLOW__CORE__LOAD_EXAMPLES=True export AIRFLOW__API__EXPOSE_CONFIG=True - echo - echo "${COLOR_BLUE}Initializing database${COLOR_RESET}" - echo - airflow db migrate - echo - echo "${COLOR_BLUE}Database initialized${COLOR_RESET}" - echo - echo "${COLOR_BLUE}Parsing example dags${COLOR_RESET}" - echo airflow dags reserialize echo "Example dags parsing finished" if airflow config get-value core auth_manager | grep -q "FabAuthManager"; then @@ -1172,6 +1200,7 @@ check_downgrade_sqlalchemy check_downgrade_pendulum check_force_lowest_dependencies check_airflow_python_client_installation +initialize_db start_api_server_with_examples check_run_tests "${@}" @@ -1359,8 +1388,8 @@ COPY --from=scripts common.sh install_packaging_tools.sh install_additional_depe # Also use `force pip` label on your PR to swap all places we use `uv` to 
`pip` ARG AIRFLOW_PIP_VERSION=25.1.1 # ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" -ARG AIRFLOW_SETUPTOOLS_VERSION=80.8.0 -ARG AIRFLOW_UV_VERSION=0.7.8 +ARG AIRFLOW_SETUPTOOLS_VERSION=80.9.0 +ARG AIRFLOW_UV_VERSION=0.7.14 # TODO(potiuk): automate with upgrade check (possibly) ARG AIRFLOW_PRE_COMMIT_VERSION="4.2.0" ARG AIRFLOW_PRE_COMMIT_UV_VERSION="4.1.4" diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts index d94124efbe529..a7efa4868c859 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/common.ts @@ -1,697 +1,1911 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 - +// generated with @7nohe/openapi-react-query-codegen@1.6.2 import { UseQueryResult } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; + +import { + AssetService, + AuthLinksService, + BackfillService, + ConfigService, + ConnectionService, + DagParsingService, + DagReportService, + DagRunService, + DagService, + DagSourceService, + DagStatsService, + DagVersionService, + DagWarningService, + DashboardService, + DependenciesService, + EventLogService, + ExtraLinksService, + GridService, + ImportErrorService, + JobService, + LoginService, + MonitorService, + PluginService, + PoolService, + ProviderService, + StructureService, + TaskInstanceService, + TaskService, + VariableService, + 
VersionService, + XcomService, +} from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; + export type AssetServiceGetAssetsDefaultResponse = Awaited>; -export type AssetServiceGetAssetsQueryResult = UseQueryResult; +export type AssetServiceGetAssetsQueryResult< + TData = AssetServiceGetAssetsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useAssetServiceGetAssetsKey = "AssetServiceGetAssets"; -export const UseAssetServiceGetAssetsKeyFn = ({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }: { - dagIds?: string[]; - limit?: number; - namePattern?: string; - offset?: number; - onlyActive?: boolean; - orderBy?: string; - uriPattern?: string; -} = {}, queryKey?: Array) => [useAssetServiceGetAssetsKey, ...(queryKey ?? [{ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }])]; -export type AssetServiceGetAssetAliasesDefaultResponse = Awaited>; -export type AssetServiceGetAssetAliasesQueryResult = UseQueryResult; +export const UseAssetServiceGetAssetsKeyFn = ( + { + dagIds, + limit, + namePattern, + offset, + onlyActive, + orderBy, + uriPattern, + }: { + dagIds?: string[]; + limit?: number; + namePattern?: string; + offset?: number; + onlyActive?: boolean; + orderBy?: string; + uriPattern?: string; + } = {}, + queryKey?: Array, +) => [ + useAssetServiceGetAssetsKey, + ...(queryKey ?? 
[{ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }]), +]; +export type AssetServiceGetAssetAliasesDefaultResponse = Awaited< + ReturnType +>; +export type AssetServiceGetAssetAliasesQueryResult< + TData = AssetServiceGetAssetAliasesDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useAssetServiceGetAssetAliasesKey = "AssetServiceGetAssetAliases"; -export const UseAssetServiceGetAssetAliasesKeyFn = ({ limit, namePattern, offset, orderBy }: { - limit?: number; - namePattern?: string; - offset?: number; - orderBy?: string; -} = {}, queryKey?: Array) => [useAssetServiceGetAssetAliasesKey, ...(queryKey ?? [{ limit, namePattern, offset, orderBy }])]; +export const UseAssetServiceGetAssetAliasesKeyFn = ( + { + limit, + namePattern, + offset, + orderBy, + }: { + limit?: number; + namePattern?: string; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: Array, +) => [useAssetServiceGetAssetAliasesKey, ...(queryKey ?? [{ limit, namePattern, offset, orderBy }])]; export type AssetServiceGetAssetAliasDefaultResponse = Awaited>; -export type AssetServiceGetAssetAliasQueryResult = UseQueryResult; +export type AssetServiceGetAssetAliasQueryResult< + TData = AssetServiceGetAssetAliasDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useAssetServiceGetAssetAliasKey = "AssetServiceGetAssetAlias"; -export const UseAssetServiceGetAssetAliasKeyFn = ({ assetAliasId }: { - assetAliasId: number; -}, queryKey?: Array) => [useAssetServiceGetAssetAliasKey, ...(queryKey ?? [{ assetAliasId }])]; -export type AssetServiceGetAssetEventsDefaultResponse = Awaited>; -export type AssetServiceGetAssetEventsQueryResult = UseQueryResult; +export const UseAssetServiceGetAssetAliasKeyFn = ( + { + assetAliasId, + }: { + assetAliasId: number; + }, + queryKey?: Array, +) => [useAssetServiceGetAssetAliasKey, ...(queryKey ?? 
[{ assetAliasId }])]; +export type AssetServiceGetAssetEventsDefaultResponse = Awaited< + ReturnType +>; +export type AssetServiceGetAssetEventsQueryResult< + TData = AssetServiceGetAssetEventsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useAssetServiceGetAssetEventsKey = "AssetServiceGetAssetEvents"; -export const UseAssetServiceGetAssetEventsKeyFn = ({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }: { - assetId?: number; - limit?: number; - offset?: number; - orderBy?: string; - sourceDagId?: string; - sourceMapIndex?: number; - sourceRunId?: string; - sourceTaskId?: string; - timestampGte?: string; - timestampLte?: string; -} = {}, queryKey?: Array) => [useAssetServiceGetAssetEventsKey, ...(queryKey ?? [{ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }])]; -export type AssetServiceGetAssetQueuedEventsDefaultResponse = Awaited>; -export type AssetServiceGetAssetQueuedEventsQueryResult = UseQueryResult; +export const UseAssetServiceGetAssetEventsKeyFn = ( + { + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }: { + assetId?: number; + limit?: number; + offset?: number; + orderBy?: string; + sourceDagId?: string; + sourceMapIndex?: number; + sourceRunId?: string; + sourceTaskId?: string; + timestampGte?: string; + timestampLte?: string; + } = {}, + queryKey?: Array, +) => [ + useAssetServiceGetAssetEventsKey, + ...(queryKey ?? 
[ + { + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }, + ]), +]; +export type AssetServiceGetAssetQueuedEventsDefaultResponse = Awaited< + ReturnType +>; +export type AssetServiceGetAssetQueuedEventsQueryResult< + TData = AssetServiceGetAssetQueuedEventsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useAssetServiceGetAssetQueuedEventsKey = "AssetServiceGetAssetQueuedEvents"; -export const UseAssetServiceGetAssetQueuedEventsKeyFn = ({ assetId, before }: { - assetId: number; - before?: string; -}, queryKey?: Array) => [useAssetServiceGetAssetQueuedEventsKey, ...(queryKey ?? [{ assetId, before }])]; +export const UseAssetServiceGetAssetQueuedEventsKeyFn = ( + { + assetId, + before, + }: { + assetId: number; + before?: string; + }, + queryKey?: Array, +) => [useAssetServiceGetAssetQueuedEventsKey, ...(queryKey ?? [{ assetId, before }])]; export type AssetServiceGetAssetDefaultResponse = Awaited>; -export type AssetServiceGetAssetQueryResult = UseQueryResult; +export type AssetServiceGetAssetQueryResult< + TData = AssetServiceGetAssetDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useAssetServiceGetAssetKey = "AssetServiceGetAsset"; -export const UseAssetServiceGetAssetKeyFn = ({ assetId }: { - assetId: number; -}, queryKey?: Array) => [useAssetServiceGetAssetKey, ...(queryKey ?? [{ assetId }])]; -export type AssetServiceGetDagAssetQueuedEventsDefaultResponse = Awaited>; -export type AssetServiceGetDagAssetQueuedEventsQueryResult = UseQueryResult; +export const UseAssetServiceGetAssetKeyFn = ( + { + assetId, + }: { + assetId: number; + }, + queryKey?: Array, +) => [useAssetServiceGetAssetKey, ...(queryKey ?? 
[{ assetId }])]; +export type AssetServiceGetDagAssetQueuedEventsDefaultResponse = Awaited< + ReturnType +>; +export type AssetServiceGetDagAssetQueuedEventsQueryResult< + TData = AssetServiceGetDagAssetQueuedEventsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useAssetServiceGetDagAssetQueuedEventsKey = "AssetServiceGetDagAssetQueuedEvents"; -export const UseAssetServiceGetDagAssetQueuedEventsKeyFn = ({ before, dagId }: { - before?: string; - dagId: string; -}, queryKey?: Array) => [useAssetServiceGetDagAssetQueuedEventsKey, ...(queryKey ?? [{ before, dagId }])]; -export type AssetServiceGetDagAssetQueuedEventDefaultResponse = Awaited>; -export type AssetServiceGetDagAssetQueuedEventQueryResult = UseQueryResult; +export const UseAssetServiceGetDagAssetQueuedEventsKeyFn = ( + { + before, + dagId, + }: { + before?: string; + dagId: string; + }, + queryKey?: Array, +) => [useAssetServiceGetDagAssetQueuedEventsKey, ...(queryKey ?? [{ before, dagId }])]; +export type AssetServiceGetDagAssetQueuedEventDefaultResponse = Awaited< + ReturnType +>; +export type AssetServiceGetDagAssetQueuedEventQueryResult< + TData = AssetServiceGetDagAssetQueuedEventDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useAssetServiceGetDagAssetQueuedEventKey = "AssetServiceGetDagAssetQueuedEvent"; -export const UseAssetServiceGetDagAssetQueuedEventKeyFn = ({ assetId, before, dagId }: { - assetId: number; - before?: string; - dagId: string; -}, queryKey?: Array) => [useAssetServiceGetDagAssetQueuedEventKey, ...(queryKey ?? [{ assetId, before, dagId }])]; +export const UseAssetServiceGetDagAssetQueuedEventKeyFn = ( + { + assetId, + before, + dagId, + }: { + assetId: number; + before?: string; + dagId: string; + }, + queryKey?: Array, +) => [useAssetServiceGetDagAssetQueuedEventKey, ...(queryKey ?? 
[{ assetId, before, dagId }])]; export type AssetServiceNextRunAssetsDefaultResponse = Awaited>; -export type AssetServiceNextRunAssetsQueryResult = UseQueryResult; +export type AssetServiceNextRunAssetsQueryResult< + TData = AssetServiceNextRunAssetsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useAssetServiceNextRunAssetsKey = "AssetServiceNextRunAssets"; -export const UseAssetServiceNextRunAssetsKeyFn = ({ dagId }: { - dagId: string; -}, queryKey?: Array) => [useAssetServiceNextRunAssetsKey, ...(queryKey ?? [{ dagId }])]; -export type BackfillServiceListBackfillsDefaultResponse = Awaited>; -export type BackfillServiceListBackfillsQueryResult = UseQueryResult; +export const UseAssetServiceNextRunAssetsKeyFn = ( + { + dagId, + }: { + dagId: string; + }, + queryKey?: Array, +) => [useAssetServiceNextRunAssetsKey, ...(queryKey ?? [{ dagId }])]; +export type BackfillServiceListBackfillsDefaultResponse = Awaited< + ReturnType +>; +export type BackfillServiceListBackfillsQueryResult< + TData = BackfillServiceListBackfillsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useBackfillServiceListBackfillsKey = "BackfillServiceListBackfills"; -export const UseBackfillServiceListBackfillsKeyFn = ({ dagId, limit, offset, orderBy }: { - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; -}, queryKey?: Array) => [useBackfillServiceListBackfillsKey, ...(queryKey ?? [{ dagId, limit, offset, orderBy }])]; -export type BackfillServiceGetBackfillDefaultResponse = Awaited>; -export type BackfillServiceGetBackfillQueryResult = UseQueryResult; +export const UseBackfillServiceListBackfillsKeyFn = ( + { + dagId, + limit, + offset, + orderBy, + }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + }, + queryKey?: Array, +) => [useBackfillServiceListBackfillsKey, ...(queryKey ?? 
[{ dagId, limit, offset, orderBy }])]; +export type BackfillServiceGetBackfillDefaultResponse = Awaited< + ReturnType +>; +export type BackfillServiceGetBackfillQueryResult< + TData = BackfillServiceGetBackfillDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useBackfillServiceGetBackfillKey = "BackfillServiceGetBackfill"; -export const UseBackfillServiceGetBackfillKeyFn = ({ backfillId }: { - backfillId: number; -}, queryKey?: Array) => [useBackfillServiceGetBackfillKey, ...(queryKey ?? [{ backfillId }])]; -export type BackfillServiceListBackfills1DefaultResponse = Awaited>; -export type BackfillServiceListBackfills1QueryResult = UseQueryResult; +export const UseBackfillServiceGetBackfillKeyFn = ( + { + backfillId, + }: { + backfillId: number; + }, + queryKey?: Array, +) => [useBackfillServiceGetBackfillKey, ...(queryKey ?? [{ backfillId }])]; +export type BackfillServiceListBackfills1DefaultResponse = Awaited< + ReturnType +>; +export type BackfillServiceListBackfills1QueryResult< + TData = BackfillServiceListBackfills1DefaultResponse, + TError = unknown, +> = UseQueryResult; export const useBackfillServiceListBackfills1Key = "BackfillServiceListBackfills1"; -export const UseBackfillServiceListBackfills1KeyFn = ({ active, dagId, limit, offset, orderBy }: { - active?: boolean; - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; -} = {}, queryKey?: Array) => [useBackfillServiceListBackfills1Key, ...(queryKey ?? [{ active, dagId, limit, offset, orderBy }])]; -export type ConnectionServiceGetConnectionDefaultResponse = Awaited>; -export type ConnectionServiceGetConnectionQueryResult = UseQueryResult; +export const UseBackfillServiceListBackfills1KeyFn = ( + { + active, + dagId, + limit, + offset, + orderBy, + }: { + active?: boolean; + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: Array, +) => [useBackfillServiceListBackfills1Key, ...(queryKey ?? 
[{ active, dagId, limit, offset, orderBy }])]; +export type ConnectionServiceGetConnectionDefaultResponse = Awaited< + ReturnType +>; +export type ConnectionServiceGetConnectionQueryResult< + TData = ConnectionServiceGetConnectionDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useConnectionServiceGetConnectionKey = "ConnectionServiceGetConnection"; -export const UseConnectionServiceGetConnectionKeyFn = ({ connectionId }: { - connectionId: string; -}, queryKey?: Array) => [useConnectionServiceGetConnectionKey, ...(queryKey ?? [{ connectionId }])]; -export type ConnectionServiceGetConnectionsDefaultResponse = Awaited>; -export type ConnectionServiceGetConnectionsQueryResult = UseQueryResult; +export const UseConnectionServiceGetConnectionKeyFn = ( + { + connectionId, + }: { + connectionId: string; + }, + queryKey?: Array, +) => [useConnectionServiceGetConnectionKey, ...(queryKey ?? [{ connectionId }])]; +export type ConnectionServiceGetConnectionsDefaultResponse = Awaited< + ReturnType +>; +export type ConnectionServiceGetConnectionsQueryResult< + TData = ConnectionServiceGetConnectionsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useConnectionServiceGetConnectionsKey = "ConnectionServiceGetConnections"; -export const UseConnectionServiceGetConnectionsKeyFn = ({ connectionIdPattern, limit, offset, orderBy }: { - connectionIdPattern?: string; - limit?: number; - offset?: number; - orderBy?: string; -} = {}, queryKey?: Array) => [useConnectionServiceGetConnectionsKey, ...(queryKey ?? 
[{ connectionIdPattern, limit, offset, orderBy }])]; -export type ConnectionServiceHookMetaDataDefaultResponse = Awaited>; -export type ConnectionServiceHookMetaDataQueryResult = UseQueryResult; +export const UseConnectionServiceGetConnectionsKeyFn = ( + { + connectionIdPattern, + limit, + offset, + orderBy, + }: { + connectionIdPattern?: string; + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: Array, +) => [ + useConnectionServiceGetConnectionsKey, + ...(queryKey ?? [{ connectionIdPattern, limit, offset, orderBy }]), +]; +export type ConnectionServiceHookMetaDataDefaultResponse = Awaited< + ReturnType +>; +export type ConnectionServiceHookMetaDataQueryResult< + TData = ConnectionServiceHookMetaDataDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useConnectionServiceHookMetaDataKey = "ConnectionServiceHookMetaData"; -export const UseConnectionServiceHookMetaDataKeyFn = (queryKey?: Array) => [useConnectionServiceHookMetaDataKey, ...(queryKey ?? [])]; +export const UseConnectionServiceHookMetaDataKeyFn = (queryKey?: Array) => [ + useConnectionServiceHookMetaDataKey, + ...(queryKey ?? []), +]; export type DagRunServiceGetDagRunDefaultResponse = Awaited>; -export type DagRunServiceGetDagRunQueryResult = UseQueryResult; +export type DagRunServiceGetDagRunQueryResult< + TData = DagRunServiceGetDagRunDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagRunServiceGetDagRunKey = "DagRunServiceGetDagRun"; -export const UseDagRunServiceGetDagRunKeyFn = ({ dagId, dagRunId }: { - dagId: string; - dagRunId: string; -}, queryKey?: Array) => [useDagRunServiceGetDagRunKey, ...(queryKey ?? 
[{ dagId, dagRunId }])]; -export type DagRunServiceGetUpstreamAssetEventsDefaultResponse = Awaited>; -export type DagRunServiceGetUpstreamAssetEventsQueryResult = UseQueryResult; +export const UseDagRunServiceGetDagRunKeyFn = ( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: Array, +) => [useDagRunServiceGetDagRunKey, ...(queryKey ?? [{ dagId, dagRunId }])]; +export type DagRunServiceGetUpstreamAssetEventsDefaultResponse = Awaited< + ReturnType +>; +export type DagRunServiceGetUpstreamAssetEventsQueryResult< + TData = DagRunServiceGetUpstreamAssetEventsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagRunServiceGetUpstreamAssetEventsKey = "DagRunServiceGetUpstreamAssetEvents"; -export const UseDagRunServiceGetUpstreamAssetEventsKeyFn = ({ dagId, dagRunId }: { - dagId: string; - dagRunId: string; -}, queryKey?: Array) => [useDagRunServiceGetUpstreamAssetEventsKey, ...(queryKey ?? [{ dagId, dagRunId }])]; +export const UseDagRunServiceGetUpstreamAssetEventsKeyFn = ( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: Array, +) => [useDagRunServiceGetUpstreamAssetEventsKey, ...(queryKey ?? 
[{ dagId, dagRunId }])]; export type DagRunServiceGetDagRunsDefaultResponse = Awaited>; -export type DagRunServiceGetDagRunsQueryResult = UseQueryResult; +export type DagRunServiceGetDagRunsQueryResult< + TData = DagRunServiceGetDagRunsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagRunServiceGetDagRunsKey = "DagRunServiceGetDagRuns"; -export const UseDagRunServiceGetDagRunsKeyFn = ({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { - dagId: string; - endDateGte?: string; - endDateLte?: string; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - updatedAtGte?: string; - updatedAtLte?: string; -}, queryKey?: Array) => [useDagRunServiceGetDagRunsKey, ...(queryKey ?? 
[{ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }])]; -export type DagSourceServiceGetDagSourceDefaultResponse = Awaited>; -export type DagSourceServiceGetDagSourceQueryResult = UseQueryResult; +export const UseDagRunServiceGetDagRunsKeyFn = ( + { + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }: { + dagId: string; + endDateGte?: string; + endDateLte?: string; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + runAfterGte?: string; + runAfterLte?: string; + runType?: string[]; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + updatedAtGte?: string; + updatedAtLte?: string; + }, + queryKey?: Array, +) => [ + useDagRunServiceGetDagRunsKey, + ...(queryKey ?? [ + { + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }, + ]), +]; +export type DagSourceServiceGetDagSourceDefaultResponse = Awaited< + ReturnType +>; +export type DagSourceServiceGetDagSourceQueryResult< + TData = DagSourceServiceGetDagSourceDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagSourceServiceGetDagSourceKey = "DagSourceServiceGetDagSource"; -export const UseDagSourceServiceGetDagSourceKeyFn = ({ accept, dagId, versionNumber }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - versionNumber?: number; -}, queryKey?: Array) => [useDagSourceServiceGetDagSourceKey, ...(queryKey ?? 
[{ accept, dagId, versionNumber }])]; -export type DagStatsServiceGetDagStatsDefaultResponse = Awaited>; -export type DagStatsServiceGetDagStatsQueryResult = UseQueryResult; +export const UseDagSourceServiceGetDagSourceKeyFn = ( + { + accept, + dagId, + versionNumber, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + dagId: string; + versionNumber?: number; + }, + queryKey?: Array, +) => [useDagSourceServiceGetDagSourceKey, ...(queryKey ?? [{ accept, dagId, versionNumber }])]; +export type DagStatsServiceGetDagStatsDefaultResponse = Awaited< + ReturnType +>; +export type DagStatsServiceGetDagStatsQueryResult< + TData = DagStatsServiceGetDagStatsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagStatsServiceGetDagStatsKey = "DagStatsServiceGetDagStats"; -export const UseDagStatsServiceGetDagStatsKeyFn = ({ dagIds }: { - dagIds?: string[]; -} = {}, queryKey?: Array) => [useDagStatsServiceGetDagStatsKey, ...(queryKey ?? [{ dagIds }])]; -export type DagReportServiceGetDagReportsDefaultResponse = Awaited>; -export type DagReportServiceGetDagReportsQueryResult = UseQueryResult; +export const UseDagStatsServiceGetDagStatsKeyFn = ( + { + dagIds, + }: { + dagIds?: string[]; + } = {}, + queryKey?: Array, +) => [useDagStatsServiceGetDagStatsKey, ...(queryKey ?? [{ dagIds }])]; +export type DagReportServiceGetDagReportsDefaultResponse = Awaited< + ReturnType +>; +export type DagReportServiceGetDagReportsQueryResult< + TData = DagReportServiceGetDagReportsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagReportServiceGetDagReportsKey = "DagReportServiceGetDagReports"; -export const UseDagReportServiceGetDagReportsKeyFn = ({ subdir }: { - subdir: string; -}, queryKey?: Array) => [useDagReportServiceGetDagReportsKey, ...(queryKey ?? 
[{ subdir }])]; +export const UseDagReportServiceGetDagReportsKeyFn = ( + { + subdir, + }: { + subdir: string; + }, + queryKey?: Array, +) => [useDagReportServiceGetDagReportsKey, ...(queryKey ?? [{ subdir }])]; export type ConfigServiceGetConfigDefaultResponse = Awaited>; -export type ConfigServiceGetConfigQueryResult = UseQueryResult; +export type ConfigServiceGetConfigQueryResult< + TData = ConfigServiceGetConfigDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useConfigServiceGetConfigKey = "ConfigServiceGetConfig"; -export const UseConfigServiceGetConfigKeyFn = ({ accept, section }: { - accept?: "application/json" | "text/plain" | "*/*"; - section?: string; -} = {}, queryKey?: Array) => [useConfigServiceGetConfigKey, ...(queryKey ?? [{ accept, section }])]; -export type ConfigServiceGetConfigValueDefaultResponse = Awaited>; -export type ConfigServiceGetConfigValueQueryResult = UseQueryResult; +export const UseConfigServiceGetConfigKeyFn = ( + { + accept, + section, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + section?: string; + } = {}, + queryKey?: Array, +) => [useConfigServiceGetConfigKey, ...(queryKey ?? [{ accept, section }])]; +export type ConfigServiceGetConfigValueDefaultResponse = Awaited< + ReturnType +>; +export type ConfigServiceGetConfigValueQueryResult< + TData = ConfigServiceGetConfigValueDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useConfigServiceGetConfigValueKey = "ConfigServiceGetConfigValue"; -export const UseConfigServiceGetConfigValueKeyFn = ({ accept, option, section }: { - accept?: "application/json" | "text/plain" | "*/*"; - option: string; - section: string; -}, queryKey?: Array) => [useConfigServiceGetConfigValueKey, ...(queryKey ?? 
[{ accept, option, section }])]; +export const UseConfigServiceGetConfigValueKeyFn = ( + { + accept, + option, + section, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + option: string; + section: string; + }, + queryKey?: Array, +) => [useConfigServiceGetConfigValueKey, ...(queryKey ?? [{ accept, option, section }])]; export type ConfigServiceGetConfigsDefaultResponse = Awaited>; -export type ConfigServiceGetConfigsQueryResult = UseQueryResult; +export type ConfigServiceGetConfigsQueryResult< + TData = ConfigServiceGetConfigsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useConfigServiceGetConfigsKey = "ConfigServiceGetConfigs"; -export const UseConfigServiceGetConfigsKeyFn = (queryKey?: Array) => [useConfigServiceGetConfigsKey, ...(queryKey ?? [])]; -export type DagWarningServiceListDagWarningsDefaultResponse = Awaited>; -export type DagWarningServiceListDagWarningsQueryResult = UseQueryResult; +export const UseConfigServiceGetConfigsKeyFn = (queryKey?: Array) => [ + useConfigServiceGetConfigsKey, + ...(queryKey ?? []), +]; +export type DagWarningServiceListDagWarningsDefaultResponse = Awaited< + ReturnType +>; +export type DagWarningServiceListDagWarningsQueryResult< + TData = DagWarningServiceListDagWarningsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagWarningServiceListDagWarningsKey = "DagWarningServiceListDagWarnings"; -export const UseDagWarningServiceListDagWarningsKeyFn = ({ dagId, limit, offset, orderBy, warningType }: { - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - warningType?: DagWarningType; -} = {}, queryKey?: Array) => [useDagWarningServiceListDagWarningsKey, ...(queryKey ?? 
[{ dagId, limit, offset, orderBy, warningType }])]; +export const UseDagWarningServiceListDagWarningsKeyFn = ( + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, + queryKey?: Array, +) => [ + useDagWarningServiceListDagWarningsKey, + ...(queryKey ?? [{ dagId, limit, offset, orderBy, warningType }]), +]; export type DagServiceGetDagsDefaultResponse = Awaited>; -export type DagServiceGetDagsQueryResult = UseQueryResult; +export type DagServiceGetDagsQueryResult< + TData = DagServiceGetDagsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagServiceGetDagsKey = "DagServiceGetDags"; -export const UseDagServiceGetDagsKeyFn = ({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagRunEndDateGte?: string; - dagRunEndDateLte?: string; - dagRunStartDateGte?: string; - dagRunStartDateLte?: string; - dagRunState?: string[]; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - orderBy?: string; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; -} = {}, queryKey?: Array) => [useDagServiceGetDagsKey, ...(queryKey ?? 
[{ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }])]; +export const UseDagServiceGetDagsKeyFn = ( + { + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunEndDateGte?: string; + dagRunEndDateLte?: string; + dagRunStartDateGte?: string; + dagRunStartDateLte?: string; + dagRunState?: string[]; + excludeStale?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + orderBy?: string; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + } = {}, + queryKey?: Array, +) => [ + useDagServiceGetDagsKey, + ...(queryKey ?? [ + { + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }, + ]), +]; export type DagServiceGetDagDefaultResponse = Awaited>; -export type DagServiceGetDagQueryResult = UseQueryResult; +export type DagServiceGetDagQueryResult< + TData = DagServiceGetDagDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagServiceGetDagKey = "DagServiceGetDag"; -export const UseDagServiceGetDagKeyFn = ({ dagId }: { - dagId: string; -}, queryKey?: Array) => [useDagServiceGetDagKey, ...(queryKey ?? [{ dagId }])]; +export const UseDagServiceGetDagKeyFn = ( + { + dagId, + }: { + dagId: string; + }, + queryKey?: Array, +) => [useDagServiceGetDagKey, ...(queryKey ?? 
[{ dagId }])]; export type DagServiceGetDagDetailsDefaultResponse = Awaited>; -export type DagServiceGetDagDetailsQueryResult = UseQueryResult; +export type DagServiceGetDagDetailsQueryResult< + TData = DagServiceGetDagDetailsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagServiceGetDagDetailsKey = "DagServiceGetDagDetails"; -export const UseDagServiceGetDagDetailsKeyFn = ({ dagId }: { - dagId: string; -}, queryKey?: Array) => [useDagServiceGetDagDetailsKey, ...(queryKey ?? [{ dagId }])]; +export const UseDagServiceGetDagDetailsKeyFn = ( + { + dagId, + }: { + dagId: string; + }, + queryKey?: Array, +) => [useDagServiceGetDagDetailsKey, ...(queryKey ?? [{ dagId }])]; export type DagServiceGetDagTagsDefaultResponse = Awaited>; -export type DagServiceGetDagTagsQueryResult = UseQueryResult; +export type DagServiceGetDagTagsQueryResult< + TData = DagServiceGetDagTagsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagServiceGetDagTagsKey = "DagServiceGetDagTags"; -export const UseDagServiceGetDagTagsKeyFn = ({ limit, offset, orderBy, tagNamePattern }: { - limit?: number; - offset?: number; - orderBy?: string; - tagNamePattern?: string; -} = {}, queryKey?: Array) => [useDagServiceGetDagTagsKey, ...(queryKey ?? [{ limit, offset, orderBy, tagNamePattern }])]; +export const UseDagServiceGetDagTagsKeyFn = ( + { + limit, + offset, + orderBy, + tagNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + tagNamePattern?: string; + } = {}, + queryKey?: Array, +) => [useDagServiceGetDagTagsKey, ...(queryKey ?? 
[{ limit, offset, orderBy, tagNamePattern }])]; export type DagServiceRecentDagRunsDefaultResponse = Awaited>; -export type DagServiceRecentDagRunsQueryResult = UseQueryResult; +export type DagServiceRecentDagRunsQueryResult< + TData = DagServiceRecentDagRunsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagServiceRecentDagRunsKey = "DagServiceRecentDagRuns"; -export const UseDagServiceRecentDagRunsKeyFn = ({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagIds?: string[]; - dagRunsLimit?: number; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; -} = {}, queryKey?: Array) => [useDagServiceRecentDagRunsKey, ...(queryKey ?? [{ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }])]; -export type EventLogServiceGetEventLogDefaultResponse = Awaited>; -export type EventLogServiceGetEventLogQueryResult = UseQueryResult; +export const UseDagServiceRecentDagRunsKeyFn = ( + { + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagIds?: string[]; + dagRunsLimit?: number; + excludeStale?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + } = {}, + queryKey?: Array, +) => [ + useDagServiceRecentDagRunsKey, + ...(queryKey ?? 
[ + { + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }, + ]), +]; +export type EventLogServiceGetEventLogDefaultResponse = Awaited< + ReturnType +>; +export type EventLogServiceGetEventLogQueryResult< + TData = EventLogServiceGetEventLogDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useEventLogServiceGetEventLogKey = "EventLogServiceGetEventLog"; -export const UseEventLogServiceGetEventLogKeyFn = ({ eventLogId }: { - eventLogId: number; -}, queryKey?: Array) => [useEventLogServiceGetEventLogKey, ...(queryKey ?? [{ eventLogId }])]; -export type EventLogServiceGetEventLogsDefaultResponse = Awaited>; -export type EventLogServiceGetEventLogsQueryResult = UseQueryResult; +export const UseEventLogServiceGetEventLogKeyFn = ( + { + eventLogId, + }: { + eventLogId: number; + }, + queryKey?: Array, +) => [useEventLogServiceGetEventLogKey, ...(queryKey ?? [{ eventLogId }])]; +export type EventLogServiceGetEventLogsDefaultResponse = Awaited< + ReturnType +>; +export type EventLogServiceGetEventLogsQueryResult< + TData = EventLogServiceGetEventLogsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useEventLogServiceGetEventLogsKey = "EventLogServiceGetEventLogs"; -export const UseEventLogServiceGetEventLogsKeyFn = ({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }: { - after?: string; - before?: string; - dagId?: string; - event?: string; - excludedEvents?: string[]; - includedEvents?: string[]; - limit?: number; - mapIndex?: number; - offset?: number; - orderBy?: string; - owner?: string; - runId?: string; - taskId?: string; - tryNumber?: number; -} = {}, queryKey?: Array) => [useEventLogServiceGetEventLogsKey, ...(queryKey ?? 
[{ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }])]; -export type ExtraLinksServiceGetExtraLinksDefaultResponse = Awaited>; -export type ExtraLinksServiceGetExtraLinksQueryResult = UseQueryResult; +export const UseEventLogServiceGetEventLogsKeyFn = ( + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; + } = {}, + queryKey?: Array, +) => [ + useEventLogServiceGetEventLogsKey, + ...(queryKey ?? [ + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }, + ]), +]; +export type ExtraLinksServiceGetExtraLinksDefaultResponse = Awaited< + ReturnType +>; +export type ExtraLinksServiceGetExtraLinksQueryResult< + TData = ExtraLinksServiceGetExtraLinksDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useExtraLinksServiceGetExtraLinksKey = "ExtraLinksServiceGetExtraLinks"; -export const UseExtraLinksServiceGetExtraLinksKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: Array) => [useExtraLinksServiceGetExtraLinksKey, ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }])]; -export type TaskInstanceServiceGetExtraLinksDefaultResponse = Awaited>; -export type TaskInstanceServiceGetExtraLinksQueryResult = UseQueryResult; +export const UseExtraLinksServiceGetExtraLinksKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: Array, +) => [useExtraLinksServiceGetExtraLinksKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; +export type TaskInstanceServiceGetExtraLinksDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetExtraLinksQueryResult< + TData = TaskInstanceServiceGetExtraLinksDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useTaskInstanceServiceGetExtraLinksKey = "TaskInstanceServiceGetExtraLinks"; -export const UseTaskInstanceServiceGetExtraLinksKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: Array) => [useTaskInstanceServiceGetExtraLinksKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; -export type TaskInstanceServiceGetTaskInstanceDefaultResponse = Awaited>; -export type TaskInstanceServiceGetTaskInstanceQueryResult = UseQueryResult; +export const UseTaskInstanceServiceGetExtraLinksKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: Array, +) => [useTaskInstanceServiceGetExtraLinksKey, ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }])]; +export type TaskInstanceServiceGetTaskInstanceDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetTaskInstanceQueryResult< + TData = TaskInstanceServiceGetTaskInstanceDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useTaskInstanceServiceGetTaskInstanceKey = "TaskInstanceServiceGetTaskInstance"; -export const UseTaskInstanceServiceGetTaskInstanceKeyFn = ({ dagId, dagRunId, taskId }: { - dagId: string; - dagRunId: string; - taskId: string; -}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstanceKey, ...(queryKey ?? [{ dagId, dagRunId, taskId }])]; -export type TaskInstanceServiceGetMappedTaskInstancesDefaultResponse = Awaited>; -export type TaskInstanceServiceGetMappedTaskInstancesQueryResult = UseQueryResult; +export const UseTaskInstanceServiceGetTaskInstanceKeyFn = ( + { + dagId, + dagRunId, + taskId, + }: { + dagId: string; + dagRunId: string; + taskId: string; + }, + queryKey?: Array, +) => [useTaskInstanceServiceGetTaskInstanceKey, ...(queryKey ?? 
[{ dagId, dagRunId, taskId }])]; +export type TaskInstanceServiceGetMappedTaskInstancesDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetMappedTaskInstancesQueryResult< + TData = TaskInstanceServiceGetMappedTaskInstancesDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useTaskInstanceServiceGetMappedTaskInstancesKey = "TaskInstanceServiceGetMappedTaskInstances"; -export const UseTaskInstanceServiceGetMappedTaskInstancesKeyFn = ({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskId: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; -}, queryKey?: Array) => [useTaskInstanceServiceGetMappedTaskInstancesKey, ...(queryKey ?? 
[{ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }])]; -export type TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexDefaultResponse = Awaited>; -export type TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexQueryResult = UseQueryResult; -export const useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKey = "TaskInstanceServiceGetTaskInstanceDependenciesByMapIndex"; -export const UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; -export type TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse = Awaited>; -export type TaskInstanceServiceGetTaskInstanceDependenciesQueryResult = UseQueryResult; -export const useTaskInstanceServiceGetTaskInstanceDependenciesKey = "TaskInstanceServiceGetTaskInstanceDependencies"; -export const UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstanceDependenciesKey, ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }])]; -export type TaskInstanceServiceGetTaskInstanceTriesDefaultResponse = Awaited>; -export type TaskInstanceServiceGetTaskInstanceTriesQueryResult = UseQueryResult; +export const UseTaskInstanceServiceGetMappedTaskInstancesKeyFn = ( + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskId: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetMappedTaskInstancesKey, + ...(queryKey ?? 
[ + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }, + ]), +]; +export type TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexQueryResult< + TData = TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKey = + "TaskInstanceServiceGetTaskInstanceDependenciesByMapIndex"; +export const UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKey, + ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }]), +]; +export type TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetTaskInstanceDependenciesQueryResult< + TData = TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useTaskInstanceServiceGetTaskInstanceDependenciesKey = + "TaskInstanceServiceGetTaskInstanceDependencies"; +export const UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetTaskInstanceDependenciesKey, + ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }]), +]; +export type TaskInstanceServiceGetTaskInstanceTriesDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetTaskInstanceTriesQueryResult< + TData = TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useTaskInstanceServiceGetTaskInstanceTriesKey = "TaskInstanceServiceGetTaskInstanceTries"; -export const UseTaskInstanceServiceGetTaskInstanceTriesKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstanceTriesKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; -export type TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse = Awaited>; -export type TaskInstanceServiceGetMappedTaskInstanceTriesQueryResult = UseQueryResult; -export const useTaskInstanceServiceGetMappedTaskInstanceTriesKey = "TaskInstanceServiceGetMappedTaskInstanceTries"; -export const UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}, queryKey?: Array) => [useTaskInstanceServiceGetMappedTaskInstanceTriesKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; -export type TaskInstanceServiceGetMappedTaskInstanceDefaultResponse = Awaited>; -export type TaskInstanceServiceGetMappedTaskInstanceQueryResult = UseQueryResult; +export const UseTaskInstanceServiceGetTaskInstanceTriesKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetTaskInstanceTriesKey, + ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }]), +]; +export type TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetMappedTaskInstanceTriesQueryResult< + TData = TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useTaskInstanceServiceGetMappedTaskInstanceTriesKey = + "TaskInstanceServiceGetMappedTaskInstanceTries"; +export const UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetMappedTaskInstanceTriesKey, + ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }]), +]; +export type TaskInstanceServiceGetMappedTaskInstanceDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetMappedTaskInstanceQueryResult< + TData = TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useTaskInstanceServiceGetMappedTaskInstanceKey = "TaskInstanceServiceGetMappedTaskInstance"; -export const UseTaskInstanceServiceGetMappedTaskInstanceKeyFn = ({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}, queryKey?: Array) => [useTaskInstanceServiceGetMappedTaskInstanceKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }])]; -export type TaskInstanceServiceGetTaskInstancesDefaultResponse = Awaited>; -export type TaskInstanceServiceGetTaskInstancesQueryResult = UseQueryResult; +export const UseTaskInstanceServiceGetMappedTaskInstanceKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetMappedTaskInstanceKey, + ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId }]), +]; +export type TaskInstanceServiceGetTaskInstancesDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetTaskInstancesQueryResult< + TData = TaskInstanceServiceGetTaskInstancesDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useTaskInstanceServiceGetTaskInstancesKey = "TaskInstanceServiceGetTaskInstances"; -export const UseTaskInstanceServiceGetTaskInstancesKeyFn = ({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskDisplayNamePattern?: string; - taskId?: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; -}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstancesKey, ...(queryKey ?? 
[{ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }])]; -export type TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse = Awaited>; -export type TaskInstanceServiceGetTaskInstanceTryDetailsQueryResult = UseQueryResult; -export const useTaskInstanceServiceGetTaskInstanceTryDetailsKey = "TaskInstanceServiceGetTaskInstanceTryDetails"; -export const UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn = ({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - taskTryNumber: number; -}, queryKey?: Array) => [useTaskInstanceServiceGetTaskInstanceTryDetailsKey, ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId, taskTryNumber }])]; -export type TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse = Awaited>; -export type TaskInstanceServiceGetMappedTaskInstanceTryDetailsQueryResult = UseQueryResult; -export const useTaskInstanceServiceGetMappedTaskInstanceTryDetailsKey = "TaskInstanceServiceGetMappedTaskInstanceTryDetails"; -export const UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn = ({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - taskTryNumber: number; -}, queryKey?: Array) => [useTaskInstanceServiceGetMappedTaskInstanceTryDetailsKey, ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId, taskTryNumber }])]; +export const UseTaskInstanceServiceGetTaskInstancesKeyFn = ( + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskDisplayNamePattern?: string; + taskId?: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetTaskInstancesKey, + ...(queryKey ?? 
[ + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }, + ]), +]; +export type TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetTaskInstanceTryDetailsQueryResult< + TData = TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useTaskInstanceServiceGetTaskInstanceTryDetailsKey = + "TaskInstanceServiceGetTaskInstanceTryDetails"; +export const UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + taskTryNumber: number; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetTaskInstanceTryDetailsKey, + ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId, taskTryNumber }]), +]; +export type TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetMappedTaskInstanceTryDetailsQueryResult< + TData = TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useTaskInstanceServiceGetMappedTaskInstanceTryDetailsKey = + "TaskInstanceServiceGetMappedTaskInstanceTryDetails"; +export const UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + taskTryNumber: number; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetMappedTaskInstanceTryDetailsKey, + ...(queryKey ?? 
[{ dagId, dagRunId, mapIndex, taskId, taskTryNumber }]), +]; export type TaskInstanceServiceGetLogDefaultResponse = Awaited>; -export type TaskInstanceServiceGetLogQueryResult = UseQueryResult; +export type TaskInstanceServiceGetLogQueryResult< + TData = TaskInstanceServiceGetLogDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useTaskInstanceServiceGetLogKey = "TaskInstanceServiceGetLog"; -export const UseTaskInstanceServiceGetLogKeyFn = ({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }: { - accept?: "application/json" | "*/*" | "application/x-ndjson"; - dagId: string; - dagRunId: string; - fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string; - tryNumber: number; -}, queryKey?: Array) => [useTaskInstanceServiceGetLogKey, ...(queryKey ?? [{ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }])]; -export type ImportErrorServiceGetImportErrorDefaultResponse = Awaited>; -export type ImportErrorServiceGetImportErrorQueryResult = UseQueryResult; +export const UseTaskInstanceServiceGetLogKeyFn = ( + { + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }: { + accept?: "application/json" | "*/*" | "application/x-ndjson"; + dagId: string; + dagRunId: string; + fullContent?: boolean; + mapIndex?: number; + taskId: string; + token?: string; + tryNumber: number; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetLogKey, + ...(queryKey ?? 
[{ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }]), +]; +export type ImportErrorServiceGetImportErrorDefaultResponse = Awaited< + ReturnType +>; +export type ImportErrorServiceGetImportErrorQueryResult< + TData = ImportErrorServiceGetImportErrorDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useImportErrorServiceGetImportErrorKey = "ImportErrorServiceGetImportError"; -export const UseImportErrorServiceGetImportErrorKeyFn = ({ importErrorId }: { - importErrorId: number; -}, queryKey?: Array) => [useImportErrorServiceGetImportErrorKey, ...(queryKey ?? [{ importErrorId }])]; -export type ImportErrorServiceGetImportErrorsDefaultResponse = Awaited>; -export type ImportErrorServiceGetImportErrorsQueryResult = UseQueryResult; +export const UseImportErrorServiceGetImportErrorKeyFn = ( + { + importErrorId, + }: { + importErrorId: number; + }, + queryKey?: Array, +) => [useImportErrorServiceGetImportErrorKey, ...(queryKey ?? [{ importErrorId }])]; +export type ImportErrorServiceGetImportErrorsDefaultResponse = Awaited< + ReturnType +>; +export type ImportErrorServiceGetImportErrorsQueryResult< + TData = ImportErrorServiceGetImportErrorsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useImportErrorServiceGetImportErrorsKey = "ImportErrorServiceGetImportErrors"; -export const UseImportErrorServiceGetImportErrorsKeyFn = ({ limit, offset, orderBy }: { - limit?: number; - offset?: number; - orderBy?: string; -} = {}, queryKey?: Array) => [useImportErrorServiceGetImportErrorsKey, ...(queryKey ?? [{ limit, offset, orderBy }])]; +export const UseImportErrorServiceGetImportErrorsKeyFn = ( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: Array, +) => [useImportErrorServiceGetImportErrorsKey, ...(queryKey ?? 
[{ limit, offset, orderBy }])]; export type JobServiceGetJobsDefaultResponse = Awaited>; -export type JobServiceGetJobsQueryResult = UseQueryResult; +export type JobServiceGetJobsQueryResult< + TData = JobServiceGetJobsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useJobServiceGetJobsKey = "JobServiceGetJobs"; -export const UseJobServiceGetJobsKeyFn = ({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }: { - endDateGte?: string; - endDateLte?: string; - executorClass?: string; - hostname?: string; - isAlive?: boolean; - jobState?: string; - jobType?: string; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string; - startDateLte?: string; -} = {}, queryKey?: Array) => [useJobServiceGetJobsKey, ...(queryKey ?? [{ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }])]; +export const UseJobServiceGetJobsKeyFn = ( + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }: { + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string; + startDateGte?: string; + startDateLte?: string; + } = {}, + queryKey?: Array, +) => [ + useJobServiceGetJobsKey, + ...(queryKey ?? 
[ + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }, + ]), +]; export type PluginServiceGetPluginsDefaultResponse = Awaited>; -export type PluginServiceGetPluginsQueryResult = UseQueryResult; +export type PluginServiceGetPluginsQueryResult< + TData = PluginServiceGetPluginsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const usePluginServiceGetPluginsKey = "PluginServiceGetPlugins"; -export const UsePluginServiceGetPluginsKeyFn = ({ limit, offset }: { - limit?: number; - offset?: number; -} = {}, queryKey?: Array) => [usePluginServiceGetPluginsKey, ...(queryKey ?? [{ limit, offset }])]; +export const UsePluginServiceGetPluginsKeyFn = ( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: Array, +) => [usePluginServiceGetPluginsKey, ...(queryKey ?? [{ limit, offset }])]; export type PoolServiceGetPoolDefaultResponse = Awaited>; -export type PoolServiceGetPoolQueryResult = UseQueryResult; +export type PoolServiceGetPoolQueryResult< + TData = PoolServiceGetPoolDefaultResponse, + TError = unknown, +> = UseQueryResult; export const usePoolServiceGetPoolKey = "PoolServiceGetPool"; -export const UsePoolServiceGetPoolKeyFn = ({ poolName }: { - poolName: string; -}, queryKey?: Array) => [usePoolServiceGetPoolKey, ...(queryKey ?? [{ poolName }])]; +export const UsePoolServiceGetPoolKeyFn = ( + { + poolName, + }: { + poolName: string; + }, + queryKey?: Array, +) => [usePoolServiceGetPoolKey, ...(queryKey ?? 
[{ poolName }])]; export type PoolServiceGetPoolsDefaultResponse = Awaited>; -export type PoolServiceGetPoolsQueryResult = UseQueryResult; +export type PoolServiceGetPoolsQueryResult< + TData = PoolServiceGetPoolsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const usePoolServiceGetPoolsKey = "PoolServiceGetPools"; -export const UsePoolServiceGetPoolsKeyFn = ({ limit, offset, orderBy, poolNamePattern }: { - limit?: number; - offset?: number; - orderBy?: string; - poolNamePattern?: string; -} = {}, queryKey?: Array) => [usePoolServiceGetPoolsKey, ...(queryKey ?? [{ limit, offset, orderBy, poolNamePattern }])]; -export type ProviderServiceGetProvidersDefaultResponse = Awaited>; -export type ProviderServiceGetProvidersQueryResult = UseQueryResult; +export const UsePoolServiceGetPoolsKeyFn = ( + { + limit, + offset, + orderBy, + poolNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + poolNamePattern?: string; + } = {}, + queryKey?: Array, +) => [usePoolServiceGetPoolsKey, ...(queryKey ?? [{ limit, offset, orderBy, poolNamePattern }])]; +export type ProviderServiceGetProvidersDefaultResponse = Awaited< + ReturnType +>; +export type ProviderServiceGetProvidersQueryResult< + TData = ProviderServiceGetProvidersDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useProviderServiceGetProvidersKey = "ProviderServiceGetProviders"; -export const UseProviderServiceGetProvidersKeyFn = ({ limit, offset }: { - limit?: number; - offset?: number; -} = {}, queryKey?: Array) => [useProviderServiceGetProvidersKey, ...(queryKey ?? [{ limit, offset }])]; +export const UseProviderServiceGetProvidersKeyFn = ( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: Array, +) => [useProviderServiceGetProvidersKey, ...(queryKey ?? 
[{ limit, offset }])]; export type XcomServiceGetXcomEntryDefaultResponse = Awaited>; -export type XcomServiceGetXcomEntryQueryResult = UseQueryResult; +export type XcomServiceGetXcomEntryQueryResult< + TData = XcomServiceGetXcomEntryDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useXcomServiceGetXcomEntryKey = "XcomServiceGetXcomEntry"; -export const UseXcomServiceGetXcomEntryKeyFn = ({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; -}, queryKey?: Array) => [useXcomServiceGetXcomEntryKey, ...(queryKey ?? [{ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }])]; +export const UseXcomServiceGetXcomEntryKeyFn = ( + { + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; + }, + queryKey?: Array, +) => [ + useXcomServiceGetXcomEntryKey, + ...(queryKey ?? [{ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }]), +]; export type XcomServiceGetXcomEntriesDefaultResponse = Awaited>; -export type XcomServiceGetXcomEntriesQueryResult = UseQueryResult; +export type XcomServiceGetXcomEntriesQueryResult< + TData = XcomServiceGetXcomEntriesDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useXcomServiceGetXcomEntriesKey = "XcomServiceGetXcomEntries"; -export const UseXcomServiceGetXcomEntriesKeyFn = ({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }: { - dagId: string; - dagRunId: string; - limit?: number; - mapIndex?: number; - offset?: number; - taskId: string; - xcomKey?: string; -}, queryKey?: Array) => [useXcomServiceGetXcomEntriesKey, ...(queryKey ?? 
[{ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }])]; +export const UseXcomServiceGetXcomEntriesKeyFn = ( + { + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; + }, + queryKey?: Array, +) => [ + useXcomServiceGetXcomEntriesKey, + ...(queryKey ?? [{ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }]), +]; export type TaskServiceGetTasksDefaultResponse = Awaited>; -export type TaskServiceGetTasksQueryResult = UseQueryResult; +export type TaskServiceGetTasksQueryResult< + TData = TaskServiceGetTasksDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useTaskServiceGetTasksKey = "TaskServiceGetTasks"; -export const UseTaskServiceGetTasksKeyFn = ({ dagId, orderBy }: { - dagId: string; - orderBy?: string; -}, queryKey?: Array) => [useTaskServiceGetTasksKey, ...(queryKey ?? [{ dagId, orderBy }])]; +export const UseTaskServiceGetTasksKeyFn = ( + { + dagId, + orderBy, + }: { + dagId: string; + orderBy?: string; + }, + queryKey?: Array, +) => [useTaskServiceGetTasksKey, ...(queryKey ?? [{ dagId, orderBy }])]; export type TaskServiceGetTaskDefaultResponse = Awaited>; -export type TaskServiceGetTaskQueryResult = UseQueryResult; +export type TaskServiceGetTaskQueryResult< + TData = TaskServiceGetTaskDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useTaskServiceGetTaskKey = "TaskServiceGetTask"; -export const UseTaskServiceGetTaskKeyFn = ({ dagId, taskId }: { - dagId: string; - taskId: unknown; -}, queryKey?: Array) => [useTaskServiceGetTaskKey, ...(queryKey ?? 
[{ dagId, taskId }])]; -export type VariableServiceGetVariableDefaultResponse = Awaited>; -export type VariableServiceGetVariableQueryResult = UseQueryResult; +export const UseTaskServiceGetTaskKeyFn = ( + { + dagId, + taskId, + }: { + dagId: string; + taskId: unknown; + }, + queryKey?: Array, +) => [useTaskServiceGetTaskKey, ...(queryKey ?? [{ dagId, taskId }])]; +export type VariableServiceGetVariableDefaultResponse = Awaited< + ReturnType +>; +export type VariableServiceGetVariableQueryResult< + TData = VariableServiceGetVariableDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useVariableServiceGetVariableKey = "VariableServiceGetVariable"; -export const UseVariableServiceGetVariableKeyFn = ({ variableKey }: { - variableKey: string; -}, queryKey?: Array) => [useVariableServiceGetVariableKey, ...(queryKey ?? [{ variableKey }])]; -export type VariableServiceGetVariablesDefaultResponse = Awaited>; -export type VariableServiceGetVariablesQueryResult = UseQueryResult; +export const UseVariableServiceGetVariableKeyFn = ( + { + variableKey, + }: { + variableKey: string; + }, + queryKey?: Array, +) => [useVariableServiceGetVariableKey, ...(queryKey ?? [{ variableKey }])]; +export type VariableServiceGetVariablesDefaultResponse = Awaited< + ReturnType +>; +export type VariableServiceGetVariablesQueryResult< + TData = VariableServiceGetVariablesDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useVariableServiceGetVariablesKey = "VariableServiceGetVariables"; -export const UseVariableServiceGetVariablesKeyFn = ({ limit, offset, orderBy, variableKeyPattern }: { - limit?: number; - offset?: number; - orderBy?: string; - variableKeyPattern?: string; -} = {}, queryKey?: Array) => [useVariableServiceGetVariablesKey, ...(queryKey ?? 
[{ limit, offset, orderBy, variableKeyPattern }])]; -export type DagVersionServiceGetDagVersionDefaultResponse = Awaited>; -export type DagVersionServiceGetDagVersionQueryResult = UseQueryResult; +export const UseVariableServiceGetVariablesKeyFn = ( + { + limit, + offset, + orderBy, + variableKeyPattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + variableKeyPattern?: string; + } = {}, + queryKey?: Array, +) => [useVariableServiceGetVariablesKey, ...(queryKey ?? [{ limit, offset, orderBy, variableKeyPattern }])]; +export type DagVersionServiceGetDagVersionDefaultResponse = Awaited< + ReturnType +>; +export type DagVersionServiceGetDagVersionQueryResult< + TData = DagVersionServiceGetDagVersionDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagVersionServiceGetDagVersionKey = "DagVersionServiceGetDagVersion"; -export const UseDagVersionServiceGetDagVersionKeyFn = ({ dagId, versionNumber }: { - dagId: string; - versionNumber: number; -}, queryKey?: Array) => [useDagVersionServiceGetDagVersionKey, ...(queryKey ?? [{ dagId, versionNumber }])]; -export type DagVersionServiceGetDagVersionsDefaultResponse = Awaited>; -export type DagVersionServiceGetDagVersionsQueryResult = UseQueryResult; +export const UseDagVersionServiceGetDagVersionKeyFn = ( + { + dagId, + versionNumber, + }: { + dagId: string; + versionNumber: number; + }, + queryKey?: Array, +) => [useDagVersionServiceGetDagVersionKey, ...(queryKey ?? 
[{ dagId, versionNumber }])]; +export type DagVersionServiceGetDagVersionsDefaultResponse = Awaited< + ReturnType +>; +export type DagVersionServiceGetDagVersionsQueryResult< + TData = DagVersionServiceGetDagVersionsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDagVersionServiceGetDagVersionsKey = "DagVersionServiceGetDagVersions"; -export const UseDagVersionServiceGetDagVersionsKeyFn = ({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }: { - bundleName?: string; - bundleVersion?: string; - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - versionNumber?: number; -}, queryKey?: Array) => [useDagVersionServiceGetDagVersionsKey, ...(queryKey ?? [{ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }])]; +export const UseDagVersionServiceGetDagVersionsKeyFn = ( + { + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }: { + bundleName?: string; + bundleVersion?: string; + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + versionNumber?: number; + }, + queryKey?: Array, +) => [ + useDagVersionServiceGetDagVersionsKey, + ...(queryKey ?? [{ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }]), +]; export type MonitorServiceGetHealthDefaultResponse = Awaited>; -export type MonitorServiceGetHealthQueryResult = UseQueryResult; +export type MonitorServiceGetHealthQueryResult< + TData = MonitorServiceGetHealthDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useMonitorServiceGetHealthKey = "MonitorServiceGetHealth"; -export const UseMonitorServiceGetHealthKeyFn = (queryKey?: Array) => [useMonitorServiceGetHealthKey, ...(queryKey ?? [])]; +export const UseMonitorServiceGetHealthKeyFn = (queryKey?: Array) => [ + useMonitorServiceGetHealthKey, + ...(queryKey ?? 
[]), +]; export type VersionServiceGetVersionDefaultResponse = Awaited>; -export type VersionServiceGetVersionQueryResult = UseQueryResult; +export type VersionServiceGetVersionQueryResult< + TData = VersionServiceGetVersionDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useVersionServiceGetVersionKey = "VersionServiceGetVersion"; -export const UseVersionServiceGetVersionKeyFn = (queryKey?: Array) => [useVersionServiceGetVersionKey, ...(queryKey ?? [])]; +export const UseVersionServiceGetVersionKeyFn = (queryKey?: Array) => [ + useVersionServiceGetVersionKey, + ...(queryKey ?? []), +]; export type LoginServiceLoginDefaultResponse = Awaited>; -export type LoginServiceLoginQueryResult = UseQueryResult; +export type LoginServiceLoginQueryResult< + TData = LoginServiceLoginDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useLoginServiceLoginKey = "LoginServiceLogin"; -export const UseLoginServiceLoginKeyFn = ({ next }: { - next?: string; -} = {}, queryKey?: Array) => [useLoginServiceLoginKey, ...(queryKey ?? [{ next }])]; +export const UseLoginServiceLoginKeyFn = ( + { + next, + }: { + next?: string; + } = {}, + queryKey?: Array, +) => [useLoginServiceLoginKey, ...(queryKey ?? [{ next }])]; export type LoginServiceLogoutDefaultResponse = Awaited>; -export type LoginServiceLogoutQueryResult = UseQueryResult; +export type LoginServiceLogoutQueryResult< + TData = LoginServiceLogoutDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useLoginServiceLogoutKey = "LoginServiceLogout"; -export const UseLoginServiceLogoutKeyFn = ({ next }: { - next?: string; -} = {}, queryKey?: Array) => [useLoginServiceLogoutKey, ...(queryKey ?? 
[{ next }])]; -export type AuthLinksServiceGetAuthMenusDefaultResponse = Awaited>; -export type AuthLinksServiceGetAuthMenusQueryResult = UseQueryResult; +export const UseLoginServiceLogoutKeyFn = ( + { + next, + }: { + next?: string; + } = {}, + queryKey?: Array, +) => [useLoginServiceLogoutKey, ...(queryKey ?? [{ next }])]; +export type AuthLinksServiceGetAuthMenusDefaultResponse = Awaited< + ReturnType +>; +export type AuthLinksServiceGetAuthMenusQueryResult< + TData = AuthLinksServiceGetAuthMenusDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useAuthLinksServiceGetAuthMenusKey = "AuthLinksServiceGetAuthMenus"; -export const UseAuthLinksServiceGetAuthMenusKeyFn = (queryKey?: Array) => [useAuthLinksServiceGetAuthMenusKey, ...(queryKey ?? [])]; -export type DependenciesServiceGetDependenciesDefaultResponse = Awaited>; -export type DependenciesServiceGetDependenciesQueryResult = UseQueryResult; +export const UseAuthLinksServiceGetAuthMenusKeyFn = (queryKey?: Array) => [ + useAuthLinksServiceGetAuthMenusKey, + ...(queryKey ?? []), +]; +export type DependenciesServiceGetDependenciesDefaultResponse = Awaited< + ReturnType +>; +export type DependenciesServiceGetDependenciesQueryResult< + TData = DependenciesServiceGetDependenciesDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDependenciesServiceGetDependenciesKey = "DependenciesServiceGetDependencies"; -export const UseDependenciesServiceGetDependenciesKeyFn = ({ nodeId }: { - nodeId?: string; -} = {}, queryKey?: Array) => [useDependenciesServiceGetDependenciesKey, ...(queryKey ?? [{ nodeId }])]; -export type DashboardServiceHistoricalMetricsDefaultResponse = Awaited>; -export type DashboardServiceHistoricalMetricsQueryResult = UseQueryResult; +export const UseDependenciesServiceGetDependenciesKeyFn = ( + { + nodeId, + }: { + nodeId?: string; + } = {}, + queryKey?: Array, +) => [useDependenciesServiceGetDependenciesKey, ...(queryKey ?? 
[{ nodeId }])]; +export type DashboardServiceHistoricalMetricsDefaultResponse = Awaited< + ReturnType +>; +export type DashboardServiceHistoricalMetricsQueryResult< + TData = DashboardServiceHistoricalMetricsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDashboardServiceHistoricalMetricsKey = "DashboardServiceHistoricalMetrics"; -export const UseDashboardServiceHistoricalMetricsKeyFn = ({ endDate, startDate }: { - endDate?: string; - startDate: string; -}, queryKey?: Array) => [useDashboardServiceHistoricalMetricsKey, ...(queryKey ?? [{ endDate, startDate }])]; +export const UseDashboardServiceHistoricalMetricsKeyFn = ( + { + endDate, + startDate, + }: { + endDate?: string; + startDate: string; + }, + queryKey?: Array, +) => [useDashboardServiceHistoricalMetricsKey, ...(queryKey ?? [{ endDate, startDate }])]; export type DashboardServiceDagStatsDefaultResponse = Awaited>; -export type DashboardServiceDagStatsQueryResult = UseQueryResult; +export type DashboardServiceDagStatsQueryResult< + TData = DashboardServiceDagStatsDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useDashboardServiceDagStatsKey = "DashboardServiceDagStats"; -export const UseDashboardServiceDagStatsKeyFn = (queryKey?: Array) => [useDashboardServiceDagStatsKey, ...(queryKey ?? [])]; -export type StructureServiceStructureDataDefaultResponse = Awaited>; -export type StructureServiceStructureDataQueryResult = UseQueryResult; +export const UseDashboardServiceDagStatsKeyFn = (queryKey?: Array) => [ + useDashboardServiceDagStatsKey, + ...(queryKey ?? 
[]), +]; +export type StructureServiceStructureDataDefaultResponse = Awaited< + ReturnType +>; +export type StructureServiceStructureDataQueryResult< + TData = StructureServiceStructureDataDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useStructureServiceStructureDataKey = "StructureServiceStructureData"; -export const UseStructureServiceStructureDataKeyFn = ({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }: { - dagId: string; - externalDependencies?: boolean; - includeDownstream?: boolean; - includeUpstream?: boolean; - root?: string; - versionNumber?: number; -}, queryKey?: Array) => [useStructureServiceStructureDataKey, ...(queryKey ?? [{ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }])]; +export const UseStructureServiceStructureDataKeyFn = ( + { + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }: { + dagId: string; + externalDependencies?: boolean; + includeDownstream?: boolean; + includeUpstream?: boolean; + root?: string; + versionNumber?: number; + }, + queryKey?: Array, +) => [ + useStructureServiceStructureDataKey, + ...(queryKey ?? 
[{ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }]), +]; export type GridServiceGridDataDefaultResponse = Awaited>; -export type GridServiceGridDataQueryResult = UseQueryResult; +export type GridServiceGridDataQueryResult< + TData = GridServiceGridDataDefaultResponse, + TError = unknown, +> = UseQueryResult; export const useGridServiceGridDataKey = "GridServiceGridData"; -export const UseGridServiceGridDataKeyFn = ({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; -}, queryKey?: Array) => [useGridServiceGridDataKey, ...(queryKey ?? [{ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }])]; -export type AssetServiceCreateAssetEventMutationResult = Awaited>; -export type AssetServiceMaterializeAssetMutationResult = Awaited>; -export type BackfillServiceCreateBackfillMutationResult = Awaited>; -export type BackfillServiceCreateBackfillDryRunMutationResult = Awaited>; -export type ConnectionServicePostConnectionMutationResult = Awaited>; -export type ConnectionServiceTestConnectionMutationResult = Awaited>; -export type ConnectionServiceCreateDefaultConnectionsMutationResult = Awaited>; +export const UseGridServiceGridDataKeyFn = ( + { + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }: { + dagId: string; + includeDownstream?: boolean; + includeUpstream?: boolean; + limit?: number; + logicalDateGte?: string; + 
logicalDateLte?: string; + offset?: number; + orderBy?: string; + root?: string; + runAfterGte?: string; + runAfterLte?: string; + runType?: string[]; + state?: string[]; + }, + queryKey?: Array, +) => [ + useGridServiceGridDataKey, + ...(queryKey ?? [ + { + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }, + ]), +]; +export type AssetServiceCreateAssetEventMutationResult = Awaited< + ReturnType +>; +export type AssetServiceMaterializeAssetMutationResult = Awaited< + ReturnType +>; +export type BackfillServiceCreateBackfillMutationResult = Awaited< + ReturnType +>; +export type BackfillServiceCreateBackfillDryRunMutationResult = Awaited< + ReturnType +>; +export type ConnectionServicePostConnectionMutationResult = Awaited< + ReturnType +>; +export type ConnectionServiceTestConnectionMutationResult = Awaited< + ReturnType +>; +export type ConnectionServiceCreateDefaultConnectionsMutationResult = Awaited< + ReturnType +>; export type DagRunServiceClearDagRunMutationResult = Awaited>; -export type DagRunServiceTriggerDagRunMutationResult = Awaited>; -export type DagRunServiceGetListDagRunsBatchMutationResult = Awaited>; -export type TaskInstanceServiceGetTaskInstancesBatchMutationResult = Awaited>; -export type TaskInstanceServicePostClearTaskInstancesMutationResult = Awaited>; +export type DagRunServiceTriggerDagRunMutationResult = Awaited< + ReturnType +>; +export type DagRunServiceGetListDagRunsBatchMutationResult = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetTaskInstancesBatchMutationResult = Awaited< + ReturnType +>; +export type TaskInstanceServicePostClearTaskInstancesMutationResult = Awaited< + ReturnType +>; export type PoolServicePostPoolMutationResult = Awaited>; -export type XcomServiceCreateXcomEntryMutationResult = Awaited>; -export type VariableServicePostVariableMutationResult = Awaited>; -export type 
BackfillServicePauseBackfillMutationResult = Awaited>; -export type BackfillServiceUnpauseBackfillMutationResult = Awaited>; -export type BackfillServiceCancelBackfillMutationResult = Awaited>; -export type DagParsingServiceReparseDagFileMutationResult = Awaited>; -export type ConnectionServicePatchConnectionMutationResult = Awaited>; -export type ConnectionServiceBulkConnectionsMutationResult = Awaited>; +export type XcomServiceCreateXcomEntryMutationResult = Awaited< + ReturnType +>; +export type VariableServicePostVariableMutationResult = Awaited< + ReturnType +>; +export type BackfillServicePauseBackfillMutationResult = Awaited< + ReturnType +>; +export type BackfillServiceUnpauseBackfillMutationResult = Awaited< + ReturnType +>; +export type BackfillServiceCancelBackfillMutationResult = Awaited< + ReturnType +>; +export type DagParsingServiceReparseDagFileMutationResult = Awaited< + ReturnType +>; +export type ConnectionServicePatchConnectionMutationResult = Awaited< + ReturnType +>; +export type ConnectionServiceBulkConnectionsMutationResult = Awaited< + ReturnType +>; export type DagRunServicePatchDagRunMutationResult = Awaited>; export type DagServicePatchDagsMutationResult = Awaited>; export type DagServicePatchDagMutationResult = Awaited>; -export type TaskInstanceServicePatchTaskInstanceMutationResult = Awaited>; -export type TaskInstanceServicePatchTaskInstanceByMapIndexMutationResult = Awaited>; -export type TaskInstanceServicePatchTaskInstanceDryRunByMapIndexMutationResult = Awaited>; -export type TaskInstanceServicePatchTaskInstanceDryRunMutationResult = Awaited>; +export type TaskInstanceServicePatchTaskInstanceMutationResult = Awaited< + ReturnType +>; +export type TaskInstanceServicePatchTaskInstanceByMapIndexMutationResult = Awaited< + ReturnType +>; +export type TaskInstanceServicePatchTaskInstanceDryRunByMapIndexMutationResult = Awaited< + ReturnType +>; +export type TaskInstanceServicePatchTaskInstanceDryRunMutationResult = Awaited< + 
ReturnType +>; export type PoolServicePatchPoolMutationResult = Awaited>; export type PoolServiceBulkPoolsMutationResult = Awaited>; -export type XcomServiceUpdateXcomEntryMutationResult = Awaited>; -export type VariableServicePatchVariableMutationResult = Awaited>; -export type VariableServiceBulkVariablesMutationResult = Awaited>; -export type AssetServiceDeleteAssetQueuedEventsMutationResult = Awaited>; -export type AssetServiceDeleteDagAssetQueuedEventsMutationResult = Awaited>; -export type AssetServiceDeleteDagAssetQueuedEventMutationResult = Awaited>; -export type ConnectionServiceDeleteConnectionMutationResult = Awaited>; +export type XcomServiceUpdateXcomEntryMutationResult = Awaited< + ReturnType +>; +export type VariableServicePatchVariableMutationResult = Awaited< + ReturnType +>; +export type VariableServiceBulkVariablesMutationResult = Awaited< + ReturnType +>; +export type AssetServiceDeleteAssetQueuedEventsMutationResult = Awaited< + ReturnType +>; +export type AssetServiceDeleteDagAssetQueuedEventsMutationResult = Awaited< + ReturnType +>; +export type AssetServiceDeleteDagAssetQueuedEventMutationResult = Awaited< + ReturnType +>; +export type ConnectionServiceDeleteConnectionMutationResult = Awaited< + ReturnType +>; export type DagRunServiceDeleteDagRunMutationResult = Awaited>; export type DagServiceDeleteDagMutationResult = Awaited>; export type PoolServiceDeletePoolMutationResult = Awaited>; -export type VariableServiceDeleteVariableMutationResult = Awaited>; +export type VariableServiceDeleteVariableMutationResult = Awaited< + ReturnType +>; diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts index 23561c78027ef..f02690c160ba0 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/ensureQueryData.ts @@ -1,1234 +1,2529 @@ -// generated with 
@7nohe/openapi-react-query-codegen@1.6.2 - +// generated with @7nohe/openapi-react-query-codegen@1.6.2 import { type QueryClient } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; + +import { + AssetService, + AuthLinksService, + BackfillService, + ConfigService, + ConnectionService, + DagReportService, + DagRunService, + DagService, + DagSourceService, + DagStatsService, + DagVersionService, + DagWarningService, + DashboardService, + DependenciesService, + EventLogService, + ExtraLinksService, + GridService, + ImportErrorService, + JobService, + LoginService, + MonitorService, + PluginService, + PoolService, + ProviderService, + StructureService, + TaskInstanceService, + TaskService, + VariableService, + VersionService, + XcomService, +} from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; + /** -* Get Assets -* Get assets. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.namePattern -* @param data.uriPattern -* @param data.dagIds -* @param data.onlyActive -* @param data.orderBy -* @returns AssetCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseAssetServiceGetAssetsData = (queryClient: QueryClient, { dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }: { - dagIds?: string[]; - limit?: number; - namePattern?: string; - offset?: number; - onlyActive?: boolean; - orderBy?: string; - uriPattern?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetsKeyFn({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }), queryFn: () => AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }) }); -/** -* Get Asset Aliases -* Get asset aliases. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.namePattern -* @param data.orderBy -* @returns AssetAliasCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseAssetServiceGetAssetAliasesData = (queryClient: QueryClient, { limit, namePattern, offset, orderBy }: { - limit?: number; - namePattern?: string; - offset?: number; - orderBy?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }), queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) }); -/** -* Get Asset Alias -* Get an asset alias. -* @param data The data for the request. 
-* @param data.assetAliasId -* @returns unknown Successful Response -* @throws ApiError -*/ -export const ensureUseAssetServiceGetAssetAliasData = (queryClient: QueryClient, { assetAliasId }: { - assetAliasId: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }), queryFn: () => AssetService.getAssetAlias({ assetAliasId }) }); -/** -* Get Asset Events -* Get asset events. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.assetId -* @param data.sourceDagId -* @param data.sourceTaskId -* @param data.sourceRunId -* @param data.sourceMapIndex -* @param data.timestampGte -* @param data.timestampLte -* @returns AssetEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseAssetServiceGetAssetEventsData = (queryClient: QueryClient, { assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }: { - assetId?: number; - limit?: number; - offset?: number; - orderBy?: string; - sourceDagId?: string; - sourceMapIndex?: number; - sourceRunId?: string; - sourceTaskId?: string; - timestampGte?: string; - timestampLte?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }), queryFn: () => AssetService.getAssetEvents({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }) }); -/** -* Get Asset Queued Events -* Get queued asset events for an asset. -* @param data The data for the request. 
-* @param data.assetId -* @param data.before -* @returns QueuedEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseAssetServiceGetAssetQueuedEventsData = (queryClient: QueryClient, { assetId, before }: { - assetId: number; - before?: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }), queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) }); -/** -* Get Asset -* Get an asset. -* @param data The data for the request. -* @param data.assetId -* @returns AssetResponse Successful Response -* @throws ApiError -*/ -export const ensureUseAssetServiceGetAssetData = (queryClient: QueryClient, { assetId }: { - assetId: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }), queryFn: () => AssetService.getAsset({ assetId }) }); -/** -* Get Dag Asset Queued Events -* Get queued asset events for a DAG. -* @param data The data for the request. -* @param data.dagId -* @param data.before -* @returns QueuedEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseAssetServiceGetDagAssetQueuedEventsData = (queryClient: QueryClient, { before, dagId }: { - before?: string; - dagId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }), queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) }); -/** -* Get Dag Asset Queued Event -* Get a queued asset event for a DAG. -* @param data The data for the request. 
-* @param data.dagId -* @param data.assetId -* @param data.before -* @returns QueuedEventResponse Successful Response -* @throws ApiError -*/ -export const ensureUseAssetServiceGetDagAssetQueuedEventData = (queryClient: QueryClient, { assetId, before, dagId }: { - assetId: number; - before?: string; - dagId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }), queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) }); -/** -* Next Run Assets -* @param data The data for the request. -* @param data.dagId -* @returns unknown Successful Response -* @throws ApiError -*/ -export const ensureUseAssetServiceNextRunAssetsData = (queryClient: QueryClient, { dagId }: { - dagId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }), queryFn: () => AssetService.nextRunAssets({ dagId }) }); -/** -* List Backfills -* @param data The data for the request. -* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns BackfillCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseBackfillServiceListBackfillsData = (queryClient: QueryClient, { dagId, limit, offset, orderBy }: { - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }), queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) }); -/** -* Get Backfill -* @param data The data for the request. 
-* @param data.backfillId -* @returns BackfillResponse Successful Response -* @throws ApiError -*/ -export const ensureUseBackfillServiceGetBackfillData = (queryClient: QueryClient, { backfillId }: { - backfillId: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }), queryFn: () => BackfillService.getBackfill({ backfillId }) }); -/** -* List Backfills -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.dagId -* @param data.active -* @returns BackfillCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseBackfillServiceListBackfills1Data = (queryClient: QueryClient, { active, dagId, limit, offset, orderBy }: { - active?: boolean; - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseBackfillServiceListBackfills1KeyFn({ active, dagId, limit, offset, orderBy }), queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }) }); -/** -* Get Connection -* Get a connection entry. -* @param data The data for the request. -* @param data.connectionId -* @returns ConnectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseConnectionServiceGetConnectionData = (queryClient: QueryClient, { connectionId }: { - connectionId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }), queryFn: () => ConnectionService.getConnection({ connectionId }) }); -/** -* Get Connections -* Get all connection entries. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.connectionIdPattern -* @returns ConnectionCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseConnectionServiceGetConnectionsData = (queryClient: QueryClient, { connectionIdPattern, limit, offset, orderBy }: { - connectionIdPattern?: string; - limit?: number; - offset?: number; - orderBy?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }), queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) }); -/** -* Hook Meta Data -* Retrieve information about available connection types (hook classes) and their parameters. -* @returns ConnectionHookMetaData Successful Response -* @throws ApiError -*/ -export const ensureUseConnectionServiceHookMetaDataData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(), queryFn: () => ConnectionService.hookMetaData() }); -/** -* Get Dag Run -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @returns DAGRunResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagRunServiceGetDagRunData = (queryClient: QueryClient, { dagId, dagRunId }: { - dagId: string; - dagRunId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) }); -/** -* Get Upstream Asset Events -* If dag run is asset-triggered, return the asset events that triggered it. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @returns AssetEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagRunServiceGetUpstreamAssetEventsData = (queryClient: QueryClient, { dagId, dagRunId }: { - dagId: string; - dagRunId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }), queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) }); -/** -* Get Dag Runs -* Get all DAG Runs. -* -* This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. -* @param data The data for the request. -* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.runType -* @param data.state -* @param data.orderBy -* @returns DAGRunCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagRunServiceGetDagRunsData = (queryClient: QueryClient, { dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { - dagId: string; - endDateGte?: string; - endDateLte?: string; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - updatedAtGte?: string; - updatedAtLte?: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, 
startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) }); -/** -* Get Dag Source -* Get source code using file token. -* @param data The data for the request. -* @param data.dagId -* @param data.versionNumber -* @param data.accept -* @returns DAGSourceResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagSourceServiceGetDagSourceData = (queryClient: QueryClient, { accept, dagId, versionNumber }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - versionNumber?: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }), queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) }); -/** -* Get Dag Stats -* Get Dag statistics. -* @param data The data for the request. -* @param data.dagIds -* @returns DagStatsCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagStatsServiceGetDagStatsData = (queryClient: QueryClient, { dagIds }: { - dagIds?: string[]; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }), queryFn: () => DagStatsService.getDagStats({ dagIds }) }); -/** -* Get Dag Reports -* Get DAG report. -* @param data The data for the request. -* @param data.subdir -* @returns unknown Successful Response -* @throws ApiError -*/ -export const ensureUseDagReportServiceGetDagReportsData = (queryClient: QueryClient, { subdir }: { - subdir: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }), queryFn: () => DagReportService.getDagReports({ subdir }) }); -/** -* Get Config -* @param data The data for the request. 
-* @param data.section -* @param data.accept -* @returns Config Successful Response -* @throws ApiError -*/ -export const ensureUseConfigServiceGetConfigData = (queryClient: QueryClient, { accept, section }: { - accept?: "application/json" | "text/plain" | "*/*"; - section?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }), queryFn: () => ConfigService.getConfig({ accept, section }) }); -/** -* Get Config Value -* @param data The data for the request. -* @param data.section -* @param data.option -* @param data.accept -* @returns Config Successful Response -* @throws ApiError -*/ -export const ensureUseConfigServiceGetConfigValueData = (queryClient: QueryClient, { accept, option, section }: { - accept?: "application/json" | "text/plain" | "*/*"; - option: string; - section: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }), queryFn: () => ConfigService.getConfigValue({ accept, option, section }) }); -/** -* Get Configs -* Get configs for UI. -* @returns ConfigResponse Successful Response -* @throws ApiError -*/ -export const ensureUseConfigServiceGetConfigsData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseConfigServiceGetConfigsKeyFn(), queryFn: () => ConfigService.getConfigs() }); -/** -* List Dag Warnings -* Get a list of DAG warnings. -* @param data The data for the request. 
-* @param data.dagId -* @param data.warningType -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns DAGWarningCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagWarningServiceListDagWarningsData = (queryClient: QueryClient, { dagId, limit, offset, orderBy, warningType }: { - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - warningType?: DagWarningType; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }), queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) }); -/** -* Get Dags -* Get all DAGs. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.tags -* @param data.tagsMatchMode -* @param data.owners -* @param data.dagIdPattern -* @param data.dagDisplayNamePattern -* @param data.excludeStale -* @param data.paused -* @param data.lastDagRunState -* @param data.dagRunStartDateGte -* @param data.dagRunStartDateLte -* @param data.dagRunEndDateGte -* @param data.dagRunEndDateLte -* @param data.dagRunState -* @param data.orderBy -* @returns DAGCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagServiceGetDagsData = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagRunEndDateGte?: string; - dagRunEndDateLte?: string; - dagRunStartDateGte?: string; - dagRunStartDateLte?: string; - dagRunState?: string[]; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - orderBy?: string; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | 
"all"; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); -/** -* Get Dag -* Get basic information about a DAG. -* @param data The data for the request. -* @param data.dagId -* @returns DAGResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagServiceGetDagData = (queryClient: QueryClient, { dagId }: { - dagId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }), queryFn: () => DagService.getDag({ dagId }) }); -/** -* Get Dag Details -* Get details of DAG. -* @param data The data for the request. -* @param data.dagId -* @returns DAGDetailsResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagServiceGetDagDetailsData = (queryClient: QueryClient, { dagId }: { - dagId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }), queryFn: () => DagService.getDagDetails({ dagId }) }); -/** -* Get Dag Tags -* Get all DAG tags. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.tagNamePattern -* @returns DAGTagCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagServiceGetDagTagsData = (queryClient: QueryClient, { limit, offset, orderBy, tagNamePattern }: { - limit?: number; - offset?: number; - orderBy?: string; - tagNamePattern?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }), queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) }); -/** -* Recent Dag Runs -* Get recent DAG runs. -* @param data The data for the request. -* @param data.dagRunsLimit -* @param data.limit -* @param data.offset -* @param data.tags -* @param data.tagsMatchMode -* @param data.owners -* @param data.dagIds -* @param data.dagIdPattern -* @param data.dagDisplayNamePattern -* @param data.excludeStale -* @param data.paused -* @param data.lastDagRunState -* @returns DAGWithLatestDagRunsCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagServiceRecentDagRunsData = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagIds?: string[]; - dagRunsLimit?: number; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDagServiceRecentDagRunsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.recentDagRuns({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, 
offset, owners, paused, tags, tagsMatchMode }) }); -/** -* Get Event Log -* @param data The data for the request. -* @param data.eventLogId -* @returns EventLogResponse Successful Response -* @throws ApiError -*/ -export const ensureUseEventLogServiceGetEventLogData = (queryClient: QueryClient, { eventLogId }: { - eventLogId: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }), queryFn: () => EventLogService.getEventLog({ eventLogId }) }); -/** -* Get Event Logs -* Get all Event Logs. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.dagId -* @param data.taskId -* @param data.runId -* @param data.mapIndex -* @param data.tryNumber -* @param data.owner -* @param data.event -* @param data.excludedEvents -* @param data.includedEvents -* @param data.before -* @param data.after -* @returns EventLogCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseEventLogServiceGetEventLogsData = (queryClient: QueryClient, { after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }: { - after?: string; - before?: string; - dagId?: string; - event?: string; - excludedEvents?: string[]; - includedEvents?: string[]; - limit?: number; - mapIndex?: number; - offset?: number; - orderBy?: string; - owner?: string; - runId?: string; - taskId?: string; - tryNumber?: number; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }), queryFn: () => EventLogService.getEventLogs({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }) }); -/** -* Get Extra Links -* Get extra links for task instance. 
-* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns ExtraLinkCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseExtraLinksServiceGetExtraLinksData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Extra Links -* Get extra links for task instance. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns ExtraLinkCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetExtraLinksData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Task Instance -* Get task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @returns TaskInstanceResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetTaskInstanceData = (queryClient: QueryClient, { dagId, dagRunId, taskId }: { - dagId: string; - dagRunId: string; - taskId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }), queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) }); -/** -* Get Mapped Task Instances -* Get list of mapped task instances. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.durationGte -* @param data.durationLte -* @param data.state -* @param data.pool -* @param data.queue -* @param data.executor -* @param data.versionNumber -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetMappedTaskInstancesData = (queryClient: QueryClient, { dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: 
string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskId: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }), queryFn: () => TaskInstanceService.getMappedTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }) }); -/** -* Get Task Instance Dependencies -* Get dependencies blocking task from getting scheduled. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskDependencyCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Task Instance Dependencies -* Get dependencies blocking task from getting scheduled. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskDependencyCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetTaskInstanceDependenciesData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Task Instance Tries -* Get list of task instances history. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceHistoryCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetTaskInstanceTriesData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Mapped Task Instance Tries -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceHistoryCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetMappedTaskInstanceTriesData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Mapped Task Instance -* Get task instance. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetMappedTaskInstanceData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Task Instances -* Get list of task instances. -* -* This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs -* and DAG runs. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.durationGte -* @param data.durationLte -* @param data.taskDisplayNamePattern -* @param data.state -* @param data.pool -* @param data.queue -* @param data.executor -* @param data.versionNumber -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetTaskInstancesData = (queryClient: QueryClient, { dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskDisplayNamePattern?: string; - taskId?: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, 
updatedAtLte, versionNumber }), queryFn: () => TaskInstanceService.getTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }) }); -/** -* Get Task Instance Try Details -* Get task instance details by try number. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.taskTryNumber -* @param data.mapIndex -* @returns TaskInstanceHistoryResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetTaskInstanceTryDetailsData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - taskTryNumber: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), queryFn: () => TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) }); -/** -* Get Mapped Task Instance Try Details -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.taskTryNumber -* @param data.mapIndex -* @returns TaskInstanceHistoryResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetMappedTaskInstanceTryDetailsData = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - taskTryNumber: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), queryFn: () => TaskInstanceService.getMappedTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) }); -/** -* Get Log -* Get logs for a specific task instance. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.tryNumber -* @param data.fullContent -* @param data.mapIndex -* @param data.token -* @param data.accept -* @returns TaskInstancesLogResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskInstanceServiceGetLogData = (queryClient: QueryClient, { accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }: { - accept?: "application/json" | "*/*" | "application/x-ndjson"; - dagId: string; - dagRunId: string; - fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string; - tryNumber: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }), queryFn: () => TaskInstanceService.getLog({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }) }); -/** -* Get Import Error -* Get an import error. -* @param data The data for the request. 
-* @param data.importErrorId -* @returns ImportErrorResponse Successful Response -* @throws ApiError -*/ -export const ensureUseImportErrorServiceGetImportErrorData = (queryClient: QueryClient, { importErrorId }: { - importErrorId: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }), queryFn: () => ImportErrorService.getImportError({ importErrorId }) }); -/** -* Get Import Errors -* Get all import errors. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns ImportErrorCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseImportErrorServiceGetImportErrorsData = (queryClient: QueryClient, { limit, offset, orderBy }: { - limit?: number; - offset?: number; - orderBy?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }), queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) }); -/** -* Get Jobs -* Get all jobs. -* @param data The data for the request. 
-* @param data.isAlive -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.jobState -* @param data.jobType -* @param data.hostname -* @param data.executorClass -* @returns JobCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseJobServiceGetJobsData = (queryClient: QueryClient, { endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }: { - endDateGte?: string; - endDateLte?: string; - executorClass?: string; - hostname?: string; - isAlive?: boolean; - jobState?: string; - jobType?: string; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string; - startDateLte?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseJobServiceGetJobsKeyFn({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }), queryFn: () => JobService.getJobs({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }) }); -/** -* Get Plugins -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @returns PluginCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUsePluginServiceGetPluginsData = (queryClient: QueryClient, { limit, offset }: { - limit?: number; - offset?: number; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), queryFn: () => PluginService.getPlugins({ limit, offset }) }); -/** -* Get Pool -* Get a pool. -* @param data The data for the request. 
-* @param data.poolName -* @returns PoolResponse Successful Response -* @throws ApiError -*/ -export const ensureUsePoolServiceGetPoolData = (queryClient: QueryClient, { poolName }: { - poolName: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), queryFn: () => PoolService.getPool({ poolName }) }); -/** -* Get Pools -* Get all pools entries. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.poolNamePattern -* @returns PoolCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUsePoolServiceGetPoolsData = (queryClient: QueryClient, { limit, offset, orderBy, poolNamePattern }: { - limit?: number; - offset?: number; - orderBy?: string; - poolNamePattern?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }), queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) }); -/** -* Get Providers -* Get providers. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @returns ProviderCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseProviderServiceGetProvidersData = (queryClient: QueryClient, { limit, offset }: { - limit?: number; - offset?: number; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), queryFn: () => ProviderService.getProviders({ limit, offset }) }); -/** -* Get Xcom Entry -* Get an XCom entry. -* @param data The data for the request. 
-* @param data.dagId -* @param data.taskId -* @param data.dagRunId -* @param data.xcomKey -* @param data.mapIndex -* @param data.deserialize -* @param data.stringify -* @returns unknown Successful Response -* @throws ApiError -*/ -export const ensureUseXcomServiceGetXcomEntryData = (queryClient: QueryClient, { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }), queryFn: () => XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }) }); -/** -* Get Xcom Entries -* Get all XCom entries. -* -* This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.xcomKey -* @param data.mapIndex -* @param data.limit -* @param data.offset -* @returns XComCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseXcomServiceGetXcomEntriesData = (queryClient: QueryClient, { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }: { - dagId: string; - dagRunId: string; - limit?: number; - mapIndex?: number; - offset?: number; - taskId: string; - xcomKey?: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }), queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) }); -/** -* Get Tasks -* Get tasks for DAG. -* @param data The data for the request. 
-* @param data.dagId -* @param data.orderBy -* @returns TaskCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskServiceGetTasksData = (queryClient: QueryClient, { dagId, orderBy }: { - dagId: string; - orderBy?: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }), queryFn: () => TaskService.getTasks({ dagId, orderBy }) }); -/** -* Get Task -* Get simplified representation of a task. -* @param data The data for the request. -* @param data.dagId -* @param data.taskId -* @returns TaskResponse Successful Response -* @throws ApiError -*/ -export const ensureUseTaskServiceGetTaskData = (queryClient: QueryClient, { dagId, taskId }: { - dagId: string; - taskId: unknown; -}) => queryClient.ensureQueryData({ queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }), queryFn: () => TaskService.getTask({ dagId, taskId }) }); -/** -* Get Variable -* Get a variable entry. -* @param data The data for the request. -* @param data.variableKey -* @returns VariableResponse Successful Response -* @throws ApiError -*/ -export const ensureUseVariableServiceGetVariableData = (queryClient: QueryClient, { variableKey }: { - variableKey: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), queryFn: () => VariableService.getVariable({ variableKey }) }); -/** -* Get Variables -* Get all Variables entries. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.variableKeyPattern -* @returns VariableCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseVariableServiceGetVariablesData = (queryClient: QueryClient, { limit, offset, orderBy, variableKeyPattern }: { - limit?: number; - offset?: number; - orderBy?: string; - variableKeyPattern?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }), queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) }); -/** -* Get Dag Version -* Get one Dag Version. -* @param data The data for the request. -* @param data.dagId -* @param data.versionNumber -* @returns DagVersionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagVersionServiceGetDagVersionData = (queryClient: QueryClient, { dagId, versionNumber }: { - dagId: string; - versionNumber: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }), queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) }); -/** -* Get Dag Versions -* Get all DAG Versions. -* -* This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. -* @param data The data for the request. 
-* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.versionNumber -* @param data.bundleName -* @param data.bundleVersion -* @param data.orderBy -* @returns DAGVersionCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDagVersionServiceGetDagVersionsData = (queryClient: QueryClient, { bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }: { - bundleName?: string; - bundleVersion?: string; - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - versionNumber?: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) }); -/** -* Get Health -* @returns HealthInfoResponse Successful Response -* @throws ApiError -*/ -export const ensureUseMonitorServiceGetHealthData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseMonitorServiceGetHealthKeyFn(), queryFn: () => MonitorService.getHealth() }); -/** -* Get Version -* Get version information. -* @returns VersionInfo Successful Response -* @throws ApiError -*/ -export const ensureUseVersionServiceGetVersionData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseVersionServiceGetVersionKeyFn(), queryFn: () => VersionService.getVersion() }); -/** -* Login -* Redirect to the login URL depending on the AuthManager configured. -* @param data The data for the request. -* @param data.next -* @returns unknown Successful Response -* @throws ApiError -*/ -export const ensureUseLoginServiceLoginData = (queryClient: QueryClient, { next }: { - next?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseLoginServiceLoginKeyFn({ next }), queryFn: () => LoginService.login({ next }) }); -/** -* Logout -* Logout the user. 
-* @param data The data for the request. -* @param data.next -* @returns unknown Successful Response -* @throws ApiError -*/ -export const ensureUseLoginServiceLogoutData = (queryClient: QueryClient, { next }: { - next?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseLoginServiceLogoutKeyFn({ next }), queryFn: () => LoginService.logout({ next }) }); -/** -* Get Auth Menus -* @returns MenuItemCollectionResponse Successful Response -* @throws ApiError -*/ -export const ensureUseAuthLinksServiceGetAuthMenusData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(), queryFn: () => AuthLinksService.getAuthMenus() }); -/** -* Get Dependencies -* Dependencies graph. -* @param data The data for the request. -* @param data.nodeId -* @returns BaseGraphResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDependenciesServiceGetDependenciesData = (queryClient: QueryClient, { nodeId }: { - nodeId?: string; -} = {}) => queryClient.ensureQueryData({ queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }), queryFn: () => DependenciesService.getDependencies({ nodeId }) }); -/** -* Historical Metrics -* Return cluster activity historical metrics. -* @param data The data for the request. -* @param data.startDate -* @param data.endDate -* @returns HistoricalMetricDataResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDashboardServiceHistoricalMetricsData = (queryClient: QueryClient, { endDate, startDate }: { - endDate?: string; - startDate: string; -}) => queryClient.ensureQueryData({ queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }), queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) }); -/** -* Dag Stats -* Return basic DAG stats with counts of DAGs in various states. 
-* @returns DashboardDagStatsResponse Successful Response -* @throws ApiError -*/ -export const ensureUseDashboardServiceDagStatsData = (queryClient: QueryClient) => queryClient.ensureQueryData({ queryKey: Common.UseDashboardServiceDagStatsKeyFn(), queryFn: () => DashboardService.dagStats() }); -/** -* Structure Data -* Get Structure Data. -* @param data The data for the request. -* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.externalDependencies -* @param data.versionNumber -* @returns StructureDataResponse Successful Response -* @throws ApiError -*/ -export const ensureUseStructureServiceStructureDataData = (queryClient: QueryClient, { dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }: { - dagId: string; - externalDependencies?: boolean; - includeDownstream?: boolean; - includeUpstream?: boolean; - root?: string; - versionNumber?: number; -}) => queryClient.ensureQueryData({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) }); -/** -* Grid Data -* Return grid data. -* @param data The data for the request. 
-* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.offset -* @param data.runType -* @param data.state -* @param data.limit -* @param data.orderBy -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @returns GridResponse Successful Response -* @throws ApiError -*/ -export const ensureUseGridServiceGridDataData = (queryClient: QueryClient, { dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; -}) => queryClient.ensureQueryData({ queryKey: Common.UseGridServiceGridDataKeyFn({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }), queryFn: () => GridService.gridData({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }) }); + * Get Assets + * Get assets. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.namePattern + * @param data.uriPattern + * @param data.dagIds + * @param data.onlyActive + * @param data.orderBy + * @returns AssetCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseAssetServiceGetAssetsData = ( + queryClient: QueryClient, + { + dagIds, + limit, + namePattern, + offset, + onlyActive, + orderBy, + uriPattern, + }: { + dagIds?: string[]; + limit?: number; + namePattern?: string; + offset?: number; + onlyActive?: boolean; + orderBy?: string; + uriPattern?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseAssetServiceGetAssetsKeyFn({ + dagIds, + limit, + namePattern, + offset, + onlyActive, + orderBy, + uriPattern, + }), + queryFn: () => + AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }), + }); +/** + * Get Asset Aliases + * Get asset aliases. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.namePattern + * @param data.orderBy + * @returns AssetAliasCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseAssetServiceGetAssetAliasesData = ( + queryClient: QueryClient, + { + limit, + namePattern, + offset, + orderBy, + }: { + limit?: number; + namePattern?: string; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }), + queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }), + }); +/** + * Get Asset Alias + * Get an asset alias. + * @param data The data for the request. 
+ * @param data.assetAliasId + * @returns unknown Successful Response + * @throws ApiError + */ +export const ensureUseAssetServiceGetAssetAliasData = ( + queryClient: QueryClient, + { + assetAliasId, + }: { + assetAliasId: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }), + queryFn: () => AssetService.getAssetAlias({ assetAliasId }), + }); +/** + * Get Asset Events + * Get asset events. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.assetId + * @param data.sourceDagId + * @param data.sourceTaskId + * @param data.sourceRunId + * @param data.sourceMapIndex + * @param data.timestampGte + * @param data.timestampLte + * @returns AssetEventCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseAssetServiceGetAssetEventsData = ( + queryClient: QueryClient, + { + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }: { + assetId?: number; + limit?: number; + offset?: number; + orderBy?: string; + sourceDagId?: string; + sourceMapIndex?: number; + sourceRunId?: string; + sourceTaskId?: string; + timestampGte?: string; + timestampLte?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }), + queryFn: () => + AssetService.getAssetEvents({ + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }), + }); +/** + * Get Asset Queued Events + * Get queued asset events for an asset. + * @param data The data for the request. 
+ * @param data.assetId + * @param data.before + * @returns QueuedEventCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseAssetServiceGetAssetQueuedEventsData = ( + queryClient: QueryClient, + { + assetId, + before, + }: { + assetId: number; + before?: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }), + queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }), + }); +/** + * Get Asset + * Get an asset. + * @param data The data for the request. + * @param data.assetId + * @returns AssetResponse Successful Response + * @throws ApiError + */ +export const ensureUseAssetServiceGetAssetData = ( + queryClient: QueryClient, + { + assetId, + }: { + assetId: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }), + queryFn: () => AssetService.getAsset({ assetId }), + }); +/** + * Get Dag Asset Queued Events + * Get queued asset events for a DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.before + * @returns QueuedEventCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseAssetServiceGetDagAssetQueuedEventsData = ( + queryClient: QueryClient, + { + before, + dagId, + }: { + before?: string; + dagId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }), + queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }), + }); +/** + * Get Dag Asset Queued Event + * Get a queued asset event for a DAG. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.assetId + * @param data.before + * @returns QueuedEventResponse Successful Response + * @throws ApiError + */ +export const ensureUseAssetServiceGetDagAssetQueuedEventData = ( + queryClient: QueryClient, + { + assetId, + before, + dagId, + }: { + assetId: number; + before?: string; + dagId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }), + queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }), + }); +/** + * Next Run Assets + * @param data The data for the request. + * @param data.dagId + * @returns unknown Successful Response + * @throws ApiError + */ +export const ensureUseAssetServiceNextRunAssetsData = ( + queryClient: QueryClient, + { + dagId, + }: { + dagId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }), + queryFn: () => AssetService.nextRunAssets({ dagId }), + }); +/** + * List Backfills + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns BackfillCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseBackfillServiceListBackfillsData = ( + queryClient: QueryClient, + { + dagId, + limit, + offset, + orderBy, + }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }), + queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }), + }); +/** + * Get Backfill + * @param data The data for the request. 
+ * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ +export const ensureUseBackfillServiceGetBackfillData = ( + queryClient: QueryClient, + { + backfillId, + }: { + backfillId: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }), + queryFn: () => BackfillService.getBackfill({ backfillId }), + }); +/** + * List Backfills + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.active + * @returns BackfillCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseBackfillServiceListBackfills1Data = ( + queryClient: QueryClient, + { + active, + dagId, + limit, + offset, + orderBy, + }: { + active?: boolean; + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseBackfillServiceListBackfills1KeyFn({ active, dagId, limit, offset, orderBy }), + queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }), + }); +/** + * Get Connection + * Get a connection entry. + * @param data The data for the request. + * @param data.connectionId + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseConnectionServiceGetConnectionData = ( + queryClient: QueryClient, + { + connectionId, + }: { + connectionId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }), + queryFn: () => ConnectionService.getConnection({ connectionId }), + }); +/** + * Get Connections + * Get all connection entries. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.connectionIdPattern + * @returns ConnectionCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseConnectionServiceGetConnectionsData = ( + queryClient: QueryClient, + { + connectionIdPattern, + limit, + offset, + orderBy, + }: { + connectionIdPattern?: string; + limit?: number; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }), + queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }), + }); +/** + * Hook Meta Data + * Retrieve information about available connection types (hook classes) and their parameters. + * @returns ConnectionHookMetaData Successful Response + * @throws ApiError + */ +export const ensureUseConnectionServiceHookMetaDataData = (queryClient: QueryClient) => + queryClient.ensureQueryData({ + queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(), + queryFn: () => ConnectionService.hookMetaData(), + }); +/** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagRunServiceGetDagRunData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }), + }); +/** + * Get Upstream Asset Events + * If dag run is asset-triggered, return the asset events that triggered it. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @returns AssetEventCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagRunServiceGetUpstreamAssetEventsData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }), + queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }), + }); +/** + * Get Dag Runs + * Get all DAG Runs. + * + * This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.runType + * @param data.state + * @param data.orderBy + * @returns DAGRunCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagRunServiceGetDagRunsData = ( + queryClient: QueryClient, + { + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }: { + dagId: string; + endDateGte?: string; + endDateLte?: string; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + runAfterGte?: string; + runAfterLte?: string; + runType?: string[]; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + updatedAtGte?: string; + updatedAtLte?: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ + dagId, + endDateGte, + 
endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }), + queryFn: () => + DagRunService.getDagRuns({ + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }), + }); +/** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. + * @param data.dagId + * @param data.versionNumber + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagSourceServiceGetDagSourceData = ( + queryClient: QueryClient, + { + accept, + dagId, + versionNumber, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + dagId: string; + versionNumber?: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }), + queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }), + }); +/** + * Get Dag Stats + * Get Dag statistics. + * @param data The data for the request. + * @param data.dagIds + * @returns DagStatsCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagStatsServiceGetDagStatsData = ( + queryClient: QueryClient, + { + dagIds, + }: { + dagIds?: string[]; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }), + queryFn: () => DagStatsService.getDagStats({ dagIds }), + }); +/** + * Get Dag Reports + * Get DAG report. + * @param data The data for the request. 
+ * @param data.subdir + * @returns unknown Successful Response + * @throws ApiError + */ +export const ensureUseDagReportServiceGetDagReportsData = ( + queryClient: QueryClient, + { + subdir, + }: { + subdir: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }), + queryFn: () => DagReportService.getDagReports({ subdir }), + }); +/** + * Get Config + * @param data The data for the request. + * @param data.section + * @param data.accept + * @returns Config Successful Response + * @throws ApiError + */ +export const ensureUseConfigServiceGetConfigData = ( + queryClient: QueryClient, + { + accept, + section, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + section?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }), + queryFn: () => ConfigService.getConfig({ accept, section }), + }); +/** + * Get Config Value + * @param data The data for the request. + * @param data.section + * @param data.option + * @param data.accept + * @returns Config Successful Response + * @throws ApiError + */ +export const ensureUseConfigServiceGetConfigValueData = ( + queryClient: QueryClient, + { + accept, + option, + section, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + option: string; + section: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }), + queryFn: () => ConfigService.getConfigValue({ accept, option, section }), + }); +/** + * Get Configs + * Get configs for UI. + * @returns ConfigResponse Successful Response + * @throws ApiError + */ +export const ensureUseConfigServiceGetConfigsData = (queryClient: QueryClient) => + queryClient.ensureQueryData({ + queryKey: Common.UseConfigServiceGetConfigsKeyFn(), + queryFn: () => ConfigService.getConfigs(), + }); +/** + * List Dag Warnings + * Get a list of DAG warnings. 
+ * @param data The data for the request. + * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagWarningServiceListDagWarningsData = ( + queryClient: QueryClient, + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }), + queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }), + }); +/** + * Get Dags + * Get all DAGs. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.excludeStale + * @param data.paused + * @param data.lastDagRunState + * @param data.dagRunStartDateGte + * @param data.dagRunStartDateLte + * @param data.dagRunEndDateGte + * @param data.dagRunEndDateLte + * @param data.dagRunState + * @param data.orderBy + * @returns DAGCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagServiceGetDagsData = ( + queryClient: QueryClient, + { + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunEndDateGte?: string; + dagRunEndDateLte?: string; + dagRunStartDateGte?: string; + dagRunStartDateLte?: string; + dagRunState?: string[]; + excludeStale?: boolean; + lastDagRunState?: DagRunState; + 
limit?: number; + offset?: number; + orderBy?: string; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagServiceGetDagsKeyFn({ + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }), + queryFn: () => + DagService.getDags({ + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }), + }); +/** + * Get Dag + * Get basic information about a DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagServiceGetDagData = ( + queryClient: QueryClient, + { + dagId, + }: { + dagId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }), + queryFn: () => DagService.getDag({ dagId }), + }); +/** + * Get Dag Details + * Get details of DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGDetailsResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagServiceGetDagDetailsData = ( + queryClient: QueryClient, + { + dagId, + }: { + dagId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }), + queryFn: () => DagService.getDagDetails({ dagId }), + }); +/** + * Get Dag Tags + * Get all DAG tags. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.tagNamePattern + * @returns DAGTagCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagServiceGetDagTagsData = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + tagNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + tagNamePattern?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }), + queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }), + }); +/** + * Recent Dag Runs + * Get recent DAG runs. + * @param data The data for the request. + * @param data.dagRunsLimit + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIds + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.excludeStale + * @param data.paused + * @param data.lastDagRunState + * @returns DAGWithLatestDagRunsCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagServiceRecentDagRunsData = ( + queryClient: QueryClient, + { + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagIds?: string[]; + dagRunsLimit?: number; + excludeStale?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagServiceRecentDagRunsKeyFn({ + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }), + queryFn: () => + 
DagService.recentDagRuns({ + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }), + }); +/** + * Get Event Log + * @param data The data for the request. + * @param data.eventLogId + * @returns EventLogResponse Successful Response + * @throws ApiError + */ +export const ensureUseEventLogServiceGetEventLogData = ( + queryClient: QueryClient, + { + eventLogId, + }: { + eventLogId: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }), + queryFn: () => EventLogService.getEventLog({ eventLogId }), + }); +/** + * Get Event Logs + * Get all Event Logs. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.taskId + * @param data.runId + * @param data.mapIndex + * @param data.tryNumber + * @param data.owner + * @param data.event + * @param data.excludedEvents + * @param data.includedEvents + * @param data.before + * @param data.after + * @returns EventLogCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseEventLogServiceGetEventLogsData = ( + queryClient: QueryClient, + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + 
tryNumber, + }), + queryFn: () => + EventLogService.getEventLogs({ + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }), + }); +/** + * Get Extra Links + * Get extra links for task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns ExtraLinkCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseExtraLinksServiceGetExtraLinksData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), + queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Extra Links + * Get extra links for task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns ExtraLinkCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetExtraLinksData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), + queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Task Instance + * Get task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetTaskInstanceData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + taskId, + }: { + dagId: string; + dagRunId: string; + taskId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }), + queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }), + }); +/** + * Get Mapped Task Instances + * Get list of mapped task instances. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.durationGte + * @param data.durationLte + * @param data.state + * @param data.pool + * @param data.queue + * @param data.executor + * @param data.versionNumber + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetMappedTaskInstancesData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + 
logicalDateLte?: string; + offset?: number; + orderBy?: string; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskId: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }), + queryFn: () => + TaskInstanceService.getMappedTaskInstances({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }), + }); +/** + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + queryFn: () => + TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetTaskInstanceDependenciesData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Task Instance Tries + * Get list of task instances history. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetTaskInstanceTriesData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), + queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Mapped Task Instance Tries + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetMappedTaskInstanceTriesData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Mapped Task Instance + * Get task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetMappedTaskInstanceData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }), + queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Task Instances + * Get list of task instances. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs + * and DAG runs. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.durationGte + * @param data.durationLte + * @param data.taskDisplayNamePattern + * @param data.state + * @param data.pool + * @param data.queue + * @param data.executor + * @param data.versionNumber + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetTaskInstancesData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + 
taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskDisplayNamePattern?: string; + taskId?: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }), + queryFn: () => + TaskInstanceService.getTaskInstances({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }), + }); +/** + * Get Task Instance Try Details + * Get task instance details by try number. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.taskTryNumber + * @param data.mapIndex + * @returns TaskInstanceHistoryResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetTaskInstanceTryDetailsData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + taskTryNumber: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }), + queryFn: () => + TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), + }); +/** + * Get Mapped Task Instance Try Details + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.taskTryNumber + * @param data.mapIndex + * @returns TaskInstanceHistoryResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetMappedTaskInstanceTryDetailsData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + taskTryNumber: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }), + queryFn: () => + TaskInstanceService.getMappedTaskInstanceTryDetails({ + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }), + }); +/** + * Get Log + * Get logs for a specific task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.tryNumber + * @param data.fullContent + * @param data.mapIndex + * @param data.token + * @param data.accept + * @returns TaskInstancesLogResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskInstanceServiceGetLogData = ( + queryClient: QueryClient, + { + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }: { + accept?: "application/json" | "*/*" | "application/x-ndjson"; + dagId: string; + dagRunId: string; + fullContent?: boolean; + mapIndex?: number; + taskId: string; + token?: string; + tryNumber: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }), + queryFn: () => + TaskInstanceService.getLog({ + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }), + }); +/** + * Get Import Error + * Get an import error. + * @param data The data for the request. + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError + */ +export const ensureUseImportErrorServiceGetImportErrorData = ( + queryClient: QueryClient, + { + importErrorId, + }: { + importErrorId: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }), + queryFn: () => ImportErrorService.getImportError({ importErrorId }), + }); +/** + * Get Import Errors + * Get all import errors. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseImportErrorServiceGetImportErrorsData = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }), + queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }), + }); +/** + * Get Jobs + * Get all jobs. + * @param data The data for the request. + * @param data.isAlive + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.jobState + * @param data.jobType + * @param data.hostname + * @param data.executorClass + * @returns JobCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseJobServiceGetJobsData = ( + queryClient: QueryClient, + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }: { + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string; + startDateGte?: string; + startDateLte?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseJobServiceGetJobsKeyFn({ + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }), + queryFn: () => + JobService.getJobs({ + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }), + 
}); +/** + * Get Plugins + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUsePluginServiceGetPluginsData = ( + queryClient: QueryClient, + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), + queryFn: () => PluginService.getPlugins({ limit, offset }), + }); +/** + * Get Pool + * Get a pool. + * @param data The data for the request. + * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const ensureUsePoolServiceGetPoolData = ( + queryClient: QueryClient, + { + poolName, + }: { + poolName: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), + queryFn: () => PoolService.getPool({ poolName }), + }); +/** + * Get Pools + * Get all pools entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.poolNamePattern + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUsePoolServiceGetPoolsData = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + poolNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + poolNamePattern?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }), + queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }), + }); +/** + * Get Providers + * Get providers. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseProviderServiceGetProvidersData = ( + queryClient: QueryClient, + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), + queryFn: () => ProviderService.getProviders({ limit, offset }), + }); +/** + * Get Xcom Entry + * Get an XCom entry. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @param data.dagRunId + * @param data.xcomKey + * @param data.mapIndex + * @param data.deserialize + * @param data.stringify + * @returns unknown Successful Response + * @throws ApiError + */ +export const ensureUseXcomServiceGetXcomEntryData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }), + queryFn: () => + XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }), + }); +/** + * Get Xcom Entries + * Get all XCom entries. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.xcomKey + * @param data.mapIndex + * @param data.limit + * @param data.offset + * @returns XComCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseXcomServiceGetXcomEntriesData = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }), + queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }), + }); +/** + * Get Tasks + * Get tasks for DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.orderBy + * @returns TaskCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskServiceGetTasksData = ( + queryClient: QueryClient, + { + dagId, + orderBy, + }: { + dagId: string; + orderBy?: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }), + queryFn: () => TaskService.getTasks({ dagId, orderBy }), + }); +/** + * Get Task + * Get simplified representation of a task. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @returns TaskResponse Successful Response + * @throws ApiError + */ +export const ensureUseTaskServiceGetTaskData = ( + queryClient: QueryClient, + { + dagId, + taskId, + }: { + dagId: string; + taskId: unknown; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }), + queryFn: () => TaskService.getTask({ dagId, taskId }), + }); +/** + * Get Variable + * Get a variable entry. 
+ * @param data The data for the request. + * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const ensureUseVariableServiceGetVariableData = ( + queryClient: QueryClient, + { + variableKey, + }: { + variableKey: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), + queryFn: () => VariableService.getVariable({ variableKey }), + }); +/** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.variableKeyPattern + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseVariableServiceGetVariablesData = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + variableKeyPattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + variableKeyPattern?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }), + queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }), + }); +/** + * Get Dag Version + * Get one Dag Version. + * @param data The data for the request. + * @param data.dagId + * @param data.versionNumber + * @returns DagVersionResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagVersionServiceGetDagVersionData = ( + queryClient: QueryClient, + { + dagId, + versionNumber, + }: { + dagId: string; + versionNumber: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }), + queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }), + }); +/** + * Get Dag Versions + * Get all DAG Versions. + * + * This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. 
+ * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.versionNumber + * @param data.bundleName + * @param data.bundleVersion + * @param data.orderBy + * @returns DAGVersionCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseDagVersionServiceGetDagVersionsData = ( + queryClient: QueryClient, + { + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }: { + bundleName?: string; + bundleVersion?: string; + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + versionNumber?: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }), + queryFn: () => + DagVersionService.getDagVersions({ + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }), + }); +/** + * Get Health + * @returns HealthInfoResponse Successful Response + * @throws ApiError + */ +export const ensureUseMonitorServiceGetHealthData = (queryClient: QueryClient) => + queryClient.ensureQueryData({ + queryKey: Common.UseMonitorServiceGetHealthKeyFn(), + queryFn: () => MonitorService.getHealth(), + }); +/** + * Get Version + * Get version information. + * @returns VersionInfo Successful Response + * @throws ApiError + */ +export const ensureUseVersionServiceGetVersionData = (queryClient: QueryClient) => + queryClient.ensureQueryData({ + queryKey: Common.UseVersionServiceGetVersionKeyFn(), + queryFn: () => VersionService.getVersion(), + }); +/** + * Login + * Redirect to the login URL depending on the AuthManager configured. + * @param data The data for the request. 
+ * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ +export const ensureUseLoginServiceLoginData = ( + queryClient: QueryClient, + { + next, + }: { + next?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseLoginServiceLoginKeyFn({ next }), + queryFn: () => LoginService.login({ next }), + }); +/** + * Logout + * Logout the user. + * @param data The data for the request. + * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ +export const ensureUseLoginServiceLogoutData = ( + queryClient: QueryClient, + { + next, + }: { + next?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseLoginServiceLogoutKeyFn({ next }), + queryFn: () => LoginService.logout({ next }), + }); +/** + * Get Auth Menus + * @returns MenuItemCollectionResponse Successful Response + * @throws ApiError + */ +export const ensureUseAuthLinksServiceGetAuthMenusData = (queryClient: QueryClient) => + queryClient.ensureQueryData({ + queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(), + queryFn: () => AuthLinksService.getAuthMenus(), + }); +/** + * Get Dependencies + * Dependencies graph. + * @param data The data for the request. + * @param data.nodeId + * @returns BaseGraphResponse Successful Response + * @throws ApiError + */ +export const ensureUseDependenciesServiceGetDependenciesData = ( + queryClient: QueryClient, + { + nodeId, + }: { + nodeId?: string; + } = {}, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }), + queryFn: () => DependenciesService.getDependencies({ nodeId }), + }); +/** + * Historical Metrics + * Return cluster activity historical metrics. + * @param data The data for the request. 
+ * @param data.startDate + * @param data.endDate + * @returns HistoricalMetricDataResponse Successful Response + * @throws ApiError + */ +export const ensureUseDashboardServiceHistoricalMetricsData = ( + queryClient: QueryClient, + { + endDate, + startDate, + }: { + endDate?: string; + startDate: string; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }), + queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }), + }); +/** + * Dag Stats + * Return basic DAG stats with counts of DAGs in various states. + * @returns DashboardDagStatsResponse Successful Response + * @throws ApiError + */ +export const ensureUseDashboardServiceDagStatsData = (queryClient: QueryClient) => + queryClient.ensureQueryData({ + queryKey: Common.UseDashboardServiceDagStatsKeyFn(), + queryFn: () => DashboardService.dagStats(), + }); +/** + * Structure Data + * Get Structure Data. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.includeUpstream + * @param data.includeDownstream + * @param data.root + * @param data.externalDependencies + * @param data.versionNumber + * @returns StructureDataResponse Successful Response + * @throws ApiError + */ +export const ensureUseStructureServiceStructureDataData = ( + queryClient: QueryClient, + { + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }: { + dagId: string; + externalDependencies?: boolean; + includeDownstream?: boolean; + includeUpstream?: boolean; + root?: string; + versionNumber?: number; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseStructureServiceStructureDataKeyFn({ + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }), + queryFn: () => + StructureService.structureData({ + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }), + }); +/** + * Grid Data + * Return grid data. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.includeUpstream + * @param data.includeDownstream + * @param data.root + * @param data.offset + * @param data.runType + * @param data.state + * @param data.limit + * @param data.orderBy + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @returns GridResponse Successful Response + * @throws ApiError + */ +export const ensureUseGridServiceGridDataData = ( + queryClient: QueryClient, + { + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }: { + dagId: string; + includeDownstream?: boolean; + includeUpstream?: boolean; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + root?: string; + runAfterGte?: string; + runAfterLte?: string; + runType?: string[]; + state?: string[]; + }, +) => + queryClient.ensureQueryData({ + queryKey: Common.UseGridServiceGridDataKeyFn({ + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }), + queryFn: () => + GridService.gridData({ + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }), + }); diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/index.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/index.ts index 8e9b6922f00c8..987c8a4ea6dde 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/index.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/index.ts @@ -1,4 +1,4 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 +// generated with @7nohe/openapi-react-query-codegen@1.6.2 export * from "./common"; export * from "./queries"; diff --git 
a/airflow-core/src/airflow/ui/openapi-gen/queries/infiniteQueries.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/infiniteQueries.ts index 37298729b1133..0baac0445f402 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/infiniteQueries.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/infiniteQueries.ts @@ -1,2 +1 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 - +// generated with @7nohe/openapi-react-query-codegen@1.6.2 diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts index acaf6ad80eca7..b9039a10f3719 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/prefetch.ts @@ -1,1234 +1,2529 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 - +// generated with @7nohe/openapi-react-query-codegen@1.6.2 import { type QueryClient } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; + +import { + AssetService, + AuthLinksService, + BackfillService, + ConfigService, + ConnectionService, + DagReportService, + DagRunService, + DagService, + DagSourceService, + DagStatsService, + DagVersionService, + DagWarningService, + DashboardService, + DependenciesService, + EventLogService, + ExtraLinksService, + GridService, + ImportErrorService, + JobService, + LoginService, + MonitorService, + PluginService, + PoolService, + ProviderService, + StructureService, + 
TaskInstanceService, + TaskService, + VariableService, + VersionService, + XcomService, +} from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; + /** -* Get Assets -* Get assets. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.namePattern -* @param data.uriPattern -* @param data.dagIds -* @param data.onlyActive -* @param data.orderBy -* @returns AssetCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseAssetServiceGetAssets = (queryClient: QueryClient, { dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }: { - dagIds?: string[]; - limit?: number; - namePattern?: string; - offset?: number; - onlyActive?: boolean; - orderBy?: string; - uriPattern?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetsKeyFn({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }), queryFn: () => AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }) }); -/** -* Get Asset Aliases -* Get asset aliases. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.namePattern -* @param data.orderBy -* @returns AssetAliasCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseAssetServiceGetAssetAliases = (queryClient: QueryClient, { limit, namePattern, offset, orderBy }: { - limit?: number; - namePattern?: string; - offset?: number; - orderBy?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }), queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) }); -/** -* Get Asset Alias -* Get an asset alias. -* @param data The data for the request. 
-* @param data.assetAliasId -* @returns unknown Successful Response -* @throws ApiError -*/ -export const prefetchUseAssetServiceGetAssetAlias = (queryClient: QueryClient, { assetAliasId }: { - assetAliasId: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }), queryFn: () => AssetService.getAssetAlias({ assetAliasId }) }); -/** -* Get Asset Events -* Get asset events. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.assetId -* @param data.sourceDagId -* @param data.sourceTaskId -* @param data.sourceRunId -* @param data.sourceMapIndex -* @param data.timestampGte -* @param data.timestampLte -* @returns AssetEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseAssetServiceGetAssetEvents = (queryClient: QueryClient, { assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }: { - assetId?: number; - limit?: number; - offset?: number; - orderBy?: string; - sourceDagId?: string; - sourceMapIndex?: number; - sourceRunId?: string; - sourceTaskId?: string; - timestampGte?: string; - timestampLte?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }), queryFn: () => AssetService.getAssetEvents({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }) }); -/** -* Get Asset Queued Events -* Get queued asset events for an asset. -* @param data The data for the request. 
-* @param data.assetId -* @param data.before -* @returns QueuedEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseAssetServiceGetAssetQueuedEvents = (queryClient: QueryClient, { assetId, before }: { - assetId: number; - before?: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }), queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) }); -/** -* Get Asset -* Get an asset. -* @param data The data for the request. -* @param data.assetId -* @returns AssetResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseAssetServiceGetAsset = (queryClient: QueryClient, { assetId }: { - assetId: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }), queryFn: () => AssetService.getAsset({ assetId }) }); -/** -* Get Dag Asset Queued Events -* Get queued asset events for a DAG. -* @param data The data for the request. -* @param data.dagId -* @param data.before -* @returns QueuedEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseAssetServiceGetDagAssetQueuedEvents = (queryClient: QueryClient, { before, dagId }: { - before?: string; - dagId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }), queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) }); -/** -* Get Dag Asset Queued Event -* Get a queued asset event for a DAG. -* @param data The data for the request. 
-* @param data.dagId -* @param data.assetId -* @param data.before -* @returns QueuedEventResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseAssetServiceGetDagAssetQueuedEvent = (queryClient: QueryClient, { assetId, before, dagId }: { - assetId: number; - before?: string; - dagId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }), queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) }); -/** -* Next Run Assets -* @param data The data for the request. -* @param data.dagId -* @returns unknown Successful Response -* @throws ApiError -*/ -export const prefetchUseAssetServiceNextRunAssets = (queryClient: QueryClient, { dagId }: { - dagId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }), queryFn: () => AssetService.nextRunAssets({ dagId }) }); -/** -* List Backfills -* @param data The data for the request. -* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns BackfillCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseBackfillServiceListBackfills = (queryClient: QueryClient, { dagId, limit, offset, orderBy }: { - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }), queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) }); -/** -* Get Backfill -* @param data The data for the request. 
-* @param data.backfillId -* @returns BackfillResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseBackfillServiceGetBackfill = (queryClient: QueryClient, { backfillId }: { - backfillId: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }), queryFn: () => BackfillService.getBackfill({ backfillId }) }); -/** -* List Backfills -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.dagId -* @param data.active -* @returns BackfillCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseBackfillServiceListBackfills1 = (queryClient: QueryClient, { active, dagId, limit, offset, orderBy }: { - active?: boolean; - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseBackfillServiceListBackfills1KeyFn({ active, dagId, limit, offset, orderBy }), queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }) }); -/** -* Get Connection -* Get a connection entry. -* @param data The data for the request. -* @param data.connectionId -* @returns ConnectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseConnectionServiceGetConnection = (queryClient: QueryClient, { connectionId }: { - connectionId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }), queryFn: () => ConnectionService.getConnection({ connectionId }) }); -/** -* Get Connections -* Get all connection entries. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.connectionIdPattern -* @returns ConnectionCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseConnectionServiceGetConnections = (queryClient: QueryClient, { connectionIdPattern, limit, offset, orderBy }: { - connectionIdPattern?: string; - limit?: number; - offset?: number; - orderBy?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }), queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) }); -/** -* Hook Meta Data -* Retrieve information about available connection types (hook classes) and their parameters. -* @returns ConnectionHookMetaData Successful Response -* @throws ApiError -*/ -export const prefetchUseConnectionServiceHookMetaData = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(), queryFn: () => ConnectionService.hookMetaData() }); -/** -* Get Dag Run -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @returns DAGRunResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagRunServiceGetDagRun = (queryClient: QueryClient, { dagId, dagRunId }: { - dagId: string; - dagRunId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) }); -/** -* Get Upstream Asset Events -* If dag run is asset-triggered, return the asset events that triggered it. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @returns AssetEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagRunServiceGetUpstreamAssetEvents = (queryClient: QueryClient, { dagId, dagRunId }: { - dagId: string; - dagRunId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }), queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) }); -/** -* Get Dag Runs -* Get all DAG Runs. -* -* This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. -* @param data The data for the request. -* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.runType -* @param data.state -* @param data.orderBy -* @returns DAGRunCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagRunServiceGetDagRuns = (queryClient: QueryClient, { dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { - dagId: string; - endDateGte?: string; - endDateLte?: string; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - updatedAtGte?: string; - updatedAtLte?: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, 
startDateLte, state, updatedAtGte, updatedAtLte }), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) }); -/** -* Get Dag Source -* Get source code using file token. -* @param data The data for the request. -* @param data.dagId -* @param data.versionNumber -* @param data.accept -* @returns DAGSourceResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagSourceServiceGetDagSource = (queryClient: QueryClient, { accept, dagId, versionNumber }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - versionNumber?: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }), queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) }); -/** -* Get Dag Stats -* Get Dag statistics. -* @param data The data for the request. -* @param data.dagIds -* @returns DagStatsCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagStatsServiceGetDagStats = (queryClient: QueryClient, { dagIds }: { - dagIds?: string[]; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }), queryFn: () => DagStatsService.getDagStats({ dagIds }) }); -/** -* Get Dag Reports -* Get DAG report. -* @param data The data for the request. -* @param data.subdir -* @returns unknown Successful Response -* @throws ApiError -*/ -export const prefetchUseDagReportServiceGetDagReports = (queryClient: QueryClient, { subdir }: { - subdir: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }), queryFn: () => DagReportService.getDagReports({ subdir }) }); -/** -* Get Config -* @param data The data for the request. 
-* @param data.section -* @param data.accept -* @returns Config Successful Response -* @throws ApiError -*/ -export const prefetchUseConfigServiceGetConfig = (queryClient: QueryClient, { accept, section }: { - accept?: "application/json" | "text/plain" | "*/*"; - section?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }), queryFn: () => ConfigService.getConfig({ accept, section }) }); -/** -* Get Config Value -* @param data The data for the request. -* @param data.section -* @param data.option -* @param data.accept -* @returns Config Successful Response -* @throws ApiError -*/ -export const prefetchUseConfigServiceGetConfigValue = (queryClient: QueryClient, { accept, option, section }: { - accept?: "application/json" | "text/plain" | "*/*"; - option: string; - section: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }), queryFn: () => ConfigService.getConfigValue({ accept, option, section }) }); -/** -* Get Configs -* Get configs for UI. -* @returns ConfigResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseConfigServiceGetConfigs = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseConfigServiceGetConfigsKeyFn(), queryFn: () => ConfigService.getConfigs() }); -/** -* List Dag Warnings -* Get a list of DAG warnings. -* @param data The data for the request. 
-* @param data.dagId -* @param data.warningType -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns DAGWarningCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagWarningServiceListDagWarnings = (queryClient: QueryClient, { dagId, limit, offset, orderBy, warningType }: { - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - warningType?: DagWarningType; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }), queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) }); -/** -* Get Dags -* Get all DAGs. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.tags -* @param data.tagsMatchMode -* @param data.owners -* @param data.dagIdPattern -* @param data.dagDisplayNamePattern -* @param data.excludeStale -* @param data.paused -* @param data.lastDagRunState -* @param data.dagRunStartDateGte -* @param data.dagRunStartDateLte -* @param data.dagRunEndDateGte -* @param data.dagRunEndDateLte -* @param data.dagRunState -* @param data.orderBy -* @returns DAGCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagServiceGetDags = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagRunEndDateGte?: string; - dagRunEndDateLte?: string; - dagRunStartDateGte?: string; - dagRunStartDateLte?: string; - dagRunState?: string[]; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - orderBy?: string; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; -} 
= {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) }); -/** -* Get Dag -* Get basic information about a DAG. -* @param data The data for the request. -* @param data.dagId -* @returns DAGResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagServiceGetDag = (queryClient: QueryClient, { dagId }: { - dagId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }), queryFn: () => DagService.getDag({ dagId }) }); -/** -* Get Dag Details -* Get details of DAG. -* @param data The data for the request. -* @param data.dagId -* @returns DAGDetailsResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagServiceGetDagDetails = (queryClient: QueryClient, { dagId }: { - dagId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }), queryFn: () => DagService.getDagDetails({ dagId }) }); -/** -* Get Dag Tags -* Get all DAG tags. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.tagNamePattern -* @returns DAGTagCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagServiceGetDagTags = (queryClient: QueryClient, { limit, offset, orderBy, tagNamePattern }: { - limit?: number; - offset?: number; - orderBy?: string; - tagNamePattern?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }), queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) }); -/** -* Recent Dag Runs -* Get recent DAG runs. -* @param data The data for the request. -* @param data.dagRunsLimit -* @param data.limit -* @param data.offset -* @param data.tags -* @param data.tagsMatchMode -* @param data.owners -* @param data.dagIds -* @param data.dagIdPattern -* @param data.dagDisplayNamePattern -* @param data.excludeStale -* @param data.paused -* @param data.lastDagRunState -* @returns DAGWithLatestDagRunsCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagServiceRecentDagRuns = (queryClient: QueryClient, { dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagIds?: string[]; - dagRunsLimit?: number; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDagServiceRecentDagRunsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }), queryFn: () => DagService.recentDagRuns({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, 
owners, paused, tags, tagsMatchMode }) }); -/** -* Get Event Log -* @param data The data for the request. -* @param data.eventLogId -* @returns EventLogResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseEventLogServiceGetEventLog = (queryClient: QueryClient, { eventLogId }: { - eventLogId: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }), queryFn: () => EventLogService.getEventLog({ eventLogId }) }); -/** -* Get Event Logs -* Get all Event Logs. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.dagId -* @param data.taskId -* @param data.runId -* @param data.mapIndex -* @param data.tryNumber -* @param data.owner -* @param data.event -* @param data.excludedEvents -* @param data.includedEvents -* @param data.before -* @param data.after -* @returns EventLogCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseEventLogServiceGetEventLogs = (queryClient: QueryClient, { after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }: { - after?: string; - before?: string; - dagId?: string; - event?: string; - excludedEvents?: string[]; - includedEvents?: string[]; - limit?: number; - mapIndex?: number; - offset?: number; - orderBy?: string; - owner?: string; - runId?: string; - taskId?: string; - tryNumber?: number; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }), queryFn: () => EventLogService.getEventLogs({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }) }); -/** -* Get Extra Links -* Get extra links for task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns ExtraLinkCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseExtraLinksServiceGetExtraLinks = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Extra Links -* Get extra links for task instance. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns ExtraLinkCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetExtraLinks = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Task Instance -* Get task instance. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @returns TaskInstanceResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetTaskInstance = (queryClient: QueryClient, { dagId, dagRunId, taskId }: { - dagId: string; - dagRunId: string; - taskId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }), queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) }); -/** -* Get Mapped Task Instances -* Get list of mapped task instances. 
-* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.durationGte -* @param data.durationLte -* @param data.state -* @param data.pool -* @param data.queue -* @param data.executor -* @param data.versionNumber -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetMappedTaskInstances = (queryClient: QueryClient, { dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskId: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }), queryFn: () => 
TaskInstanceService.getMappedTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }) }); -/** -* Get Task Instance Dependencies -* Get dependencies blocking task from getting scheduled. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskDependencyCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndex = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Task Instance Dependencies -* Get dependencies blocking task from getting scheduled. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskDependencyCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetTaskInstanceDependencies = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Task Instance Tries -* Get list of task instances history. 
-* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceHistoryCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetTaskInstanceTries = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Mapped Task Instance Tries -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceHistoryCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetMappedTaskInstanceTries = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Mapped Task Instance -* Get task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetMappedTaskInstance = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }), queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) }); -/** -* Get Task Instances -* Get list of task instances. -* -* This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs -* and DAG runs. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.durationGte -* @param data.durationLte -* @param data.taskDisplayNamePattern -* @param data.state -* @param data.pool -* @param data.queue -* @param data.executor -* @param data.versionNumber -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetTaskInstances = (queryClient: QueryClient, { dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }: { - dagId: string; - dagRunId: string; - durationGte?: number; - 
durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskDisplayNamePattern?: string; - taskId?: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }), queryFn: () => TaskInstanceService.getTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }) }); -/** -* Get Task Instance Try Details -* Get task instance details by try number. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.taskTryNumber -* @param data.mapIndex -* @returns TaskInstanceHistoryResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetTaskInstanceTryDetails = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - taskTryNumber: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), queryFn: () => TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) }); -/** -* Get Mapped Task Instance Try Details -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.taskTryNumber -* @param data.mapIndex -* @returns TaskInstanceHistoryResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetMappedTaskInstanceTryDetails = (queryClient: QueryClient, { dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - taskTryNumber: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), queryFn: () => TaskInstanceService.getMappedTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) }); -/** -* Get Log -* Get logs for a specific task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.tryNumber -* @param data.fullContent -* @param data.mapIndex -* @param data.token -* @param data.accept -* @returns TaskInstancesLogResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskInstanceServiceGetLog = (queryClient: QueryClient, { accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }: { - accept?: "application/json" | "*/*" | "application/x-ndjson"; - dagId: string; - dagRunId: string; - fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string; - tryNumber: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }), queryFn: () => TaskInstanceService.getLog({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }) }); -/** -* Get Import Error -* Get an import error. -* @param data The data for the request. -* @param data.importErrorId -* @returns ImportErrorResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseImportErrorServiceGetImportError = (queryClient: QueryClient, { importErrorId }: { - importErrorId: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }), queryFn: () => ImportErrorService.getImportError({ importErrorId }) }); -/** -* Get Import Errors -* Get all import errors. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns ImportErrorCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseImportErrorServiceGetImportErrors = (queryClient: QueryClient, { limit, offset, orderBy }: { - limit?: number; - offset?: number; - orderBy?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }), queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) }); -/** -* Get Jobs -* Get all jobs. -* @param data The data for the request. -* @param data.isAlive -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.jobState -* @param data.jobType -* @param data.hostname -* @param data.executorClass -* @returns JobCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseJobServiceGetJobs = (queryClient: QueryClient, { endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }: { - endDateGte?: string; - endDateLte?: string; - executorClass?: string; - hostname?: string; - isAlive?: boolean; - jobState?: string; - jobType?: string; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string; - startDateLte?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseJobServiceGetJobsKeyFn({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }), queryFn: () => JobService.getJobs({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }) }); -/** -* Get Plugins -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @returns PluginCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUsePluginServiceGetPlugins = (queryClient: QueryClient, { limit, offset }: { - limit?: number; - offset?: number; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), queryFn: () => PluginService.getPlugins({ limit, offset }) }); -/** -* Get Pool -* Get a pool. -* @param data The data for the request. -* @param data.poolName -* @returns PoolResponse Successful Response -* @throws ApiError -*/ -export const prefetchUsePoolServiceGetPool = (queryClient: QueryClient, { poolName }: { - poolName: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), queryFn: () => PoolService.getPool({ poolName }) }); -/** -* Get Pools -* Get all pools entries. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.poolNamePattern -* @returns PoolCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUsePoolServiceGetPools = (queryClient: QueryClient, { limit, offset, orderBy, poolNamePattern }: { - limit?: number; - offset?: number; - orderBy?: string; - poolNamePattern?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }), queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) }); -/** -* Get Providers -* Get providers. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @returns ProviderCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseProviderServiceGetProviders = (queryClient: QueryClient, { limit, offset }: { - limit?: number; - offset?: number; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), queryFn: () => ProviderService.getProviders({ limit, offset }) }); -/** -* Get Xcom Entry -* Get an XCom entry. -* @param data The data for the request. -* @param data.dagId -* @param data.taskId -* @param data.dagRunId -* @param data.xcomKey -* @param data.mapIndex -* @param data.deserialize -* @param data.stringify -* @returns unknown Successful Response -* @throws ApiError -*/ -export const prefetchUseXcomServiceGetXcomEntry = (queryClient: QueryClient, { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }), queryFn: () => XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }) }); -/** -* Get Xcom Entries -* Get all XCom entries. -* -* This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.xcomKey -* @param data.mapIndex -* @param data.limit -* @param data.offset -* @returns XComCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseXcomServiceGetXcomEntries = (queryClient: QueryClient, { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }: { - dagId: string; - dagRunId: string; - limit?: number; - mapIndex?: number; - offset?: number; - taskId: string; - xcomKey?: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }), queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) }); -/** -* Get Tasks -* Get tasks for DAG. -* @param data The data for the request. -* @param data.dagId -* @param data.orderBy -* @returns TaskCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskServiceGetTasks = (queryClient: QueryClient, { dagId, orderBy }: { - dagId: string; - orderBy?: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }), queryFn: () => TaskService.getTasks({ dagId, orderBy }) }); -/** -* Get Task -* Get simplified representation of a task. -* @param data The data for the request. -* @param data.dagId -* @param data.taskId -* @returns TaskResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseTaskServiceGetTask = (queryClient: QueryClient, { dagId, taskId }: { - dagId: string; - taskId: unknown; -}) => queryClient.prefetchQuery({ queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }), queryFn: () => TaskService.getTask({ dagId, taskId }) }); -/** -* Get Variable -* Get a variable entry. -* @param data The data for the request. 
-* @param data.variableKey -* @returns VariableResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseVariableServiceGetVariable = (queryClient: QueryClient, { variableKey }: { - variableKey: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), queryFn: () => VariableService.getVariable({ variableKey }) }); -/** -* Get Variables -* Get all Variables entries. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.variableKeyPattern -* @returns VariableCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseVariableServiceGetVariables = (queryClient: QueryClient, { limit, offset, orderBy, variableKeyPattern }: { - limit?: number; - offset?: number; - orderBy?: string; - variableKeyPattern?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }), queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) }); -/** -* Get Dag Version -* Get one Dag Version. -* @param data The data for the request. -* @param data.dagId -* @param data.versionNumber -* @returns DagVersionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagVersionServiceGetDagVersion = (queryClient: QueryClient, { dagId, versionNumber }: { - dagId: string; - versionNumber: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }), queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) }); -/** -* Get Dag Versions -* Get all DAG Versions. -* -* This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. -* @param data The data for the request. 
-* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.versionNumber -* @param data.bundleName -* @param data.bundleVersion -* @param data.orderBy -* @returns DAGVersionCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDagVersionServiceGetDagVersions = (queryClient: QueryClient, { bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }: { - bundleName?: string; - bundleVersion?: string; - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - versionNumber?: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) }); -/** -* Get Health -* @returns HealthInfoResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseMonitorServiceGetHealth = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseMonitorServiceGetHealthKeyFn(), queryFn: () => MonitorService.getHealth() }); -/** -* Get Version -* Get version information. -* @returns VersionInfo Successful Response -* @throws ApiError -*/ -export const prefetchUseVersionServiceGetVersion = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseVersionServiceGetVersionKeyFn(), queryFn: () => VersionService.getVersion() }); -/** -* Login -* Redirect to the login URL depending on the AuthManager configured. -* @param data The data for the request. -* @param data.next -* @returns unknown Successful Response -* @throws ApiError -*/ -export const prefetchUseLoginServiceLogin = (queryClient: QueryClient, { next }: { - next?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseLoginServiceLoginKeyFn({ next }), queryFn: () => LoginService.login({ next }) }); -/** -* Logout -* Logout the user. 
-* @param data The data for the request. -* @param data.next -* @returns unknown Successful Response -* @throws ApiError -*/ -export const prefetchUseLoginServiceLogout = (queryClient: QueryClient, { next }: { - next?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseLoginServiceLogoutKeyFn({ next }), queryFn: () => LoginService.logout({ next }) }); -/** -* Get Auth Menus -* @returns MenuItemCollectionResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseAuthLinksServiceGetAuthMenus = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(), queryFn: () => AuthLinksService.getAuthMenus() }); -/** -* Get Dependencies -* Dependencies graph. -* @param data The data for the request. -* @param data.nodeId -* @returns BaseGraphResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDependenciesServiceGetDependencies = (queryClient: QueryClient, { nodeId }: { - nodeId?: string; -} = {}) => queryClient.prefetchQuery({ queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }), queryFn: () => DependenciesService.getDependencies({ nodeId }) }); -/** -* Historical Metrics -* Return cluster activity historical metrics. -* @param data The data for the request. -* @param data.startDate -* @param data.endDate -* @returns HistoricalMetricDataResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDashboardServiceHistoricalMetrics = (queryClient: QueryClient, { endDate, startDate }: { - endDate?: string; - startDate: string; -}) => queryClient.prefetchQuery({ queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }), queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) }); -/** -* Dag Stats -* Return basic DAG stats with counts of DAGs in various states. 
-* @returns DashboardDagStatsResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseDashboardServiceDagStats = (queryClient: QueryClient) => queryClient.prefetchQuery({ queryKey: Common.UseDashboardServiceDagStatsKeyFn(), queryFn: () => DashboardService.dagStats() }); -/** -* Structure Data -* Get Structure Data. -* @param data The data for the request. -* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.externalDependencies -* @param data.versionNumber -* @returns StructureDataResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseStructureServiceStructureData = (queryClient: QueryClient, { dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }: { - dagId: string; - externalDependencies?: boolean; - includeDownstream?: boolean; - includeUpstream?: boolean; - root?: string; - versionNumber?: number; -}) => queryClient.prefetchQuery({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) }); -/** -* Grid Data -* Return grid data. -* @param data The data for the request. 
-* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.offset -* @param data.runType -* @param data.state -* @param data.limit -* @param data.orderBy -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @returns GridResponse Successful Response -* @throws ApiError -*/ -export const prefetchUseGridServiceGridData = (queryClient: QueryClient, { dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; -}) => queryClient.prefetchQuery({ queryKey: Common.UseGridServiceGridDataKeyFn({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }), queryFn: () => GridService.gridData({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }) }); + * Get Assets + * Get assets. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.namePattern + * @param data.uriPattern + * @param data.dagIds + * @param data.onlyActive + * @param data.orderBy + * @returns AssetCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseAssetServiceGetAssets = ( + queryClient: QueryClient, + { + dagIds, + limit, + namePattern, + offset, + onlyActive, + orderBy, + uriPattern, + }: { + dagIds?: string[]; + limit?: number; + namePattern?: string; + offset?: number; + onlyActive?: boolean; + orderBy?: string; + uriPattern?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseAssetServiceGetAssetsKeyFn({ + dagIds, + limit, + namePattern, + offset, + onlyActive, + orderBy, + uriPattern, + }), + queryFn: () => + AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }), + }); +/** + * Get Asset Aliases + * Get asset aliases. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.namePattern + * @param data.orderBy + * @returns AssetAliasCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseAssetServiceGetAssetAliases = ( + queryClient: QueryClient, + { + limit, + namePattern, + offset, + orderBy, + }: { + limit?: number; + namePattern?: string; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }), + queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }), + }); +/** + * Get Asset Alias + * Get an asset alias. + * @param data The data for the request. 
+ * @param data.assetAliasId + * @returns unknown Successful Response + * @throws ApiError + */ +export const prefetchUseAssetServiceGetAssetAlias = ( + queryClient: QueryClient, + { + assetAliasId, + }: { + assetAliasId: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }), + queryFn: () => AssetService.getAssetAlias({ assetAliasId }), + }); +/** + * Get Asset Events + * Get asset events. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.assetId + * @param data.sourceDagId + * @param data.sourceTaskId + * @param data.sourceRunId + * @param data.sourceMapIndex + * @param data.timestampGte + * @param data.timestampLte + * @returns AssetEventCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseAssetServiceGetAssetEvents = ( + queryClient: QueryClient, + { + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }: { + assetId?: number; + limit?: number; + offset?: number; + orderBy?: string; + sourceDagId?: string; + sourceMapIndex?: number; + sourceRunId?: string; + sourceTaskId?: string; + timestampGte?: string; + timestampLte?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }), + queryFn: () => + AssetService.getAssetEvents({ + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }), + }); +/** + * Get Asset Queued Events + * Get queued asset events for an asset. + * @param data The data for the request. 
+ * @param data.assetId + * @param data.before + * @returns QueuedEventCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseAssetServiceGetAssetQueuedEvents = ( + queryClient: QueryClient, + { + assetId, + before, + }: { + assetId: number; + before?: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }), + queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }), + }); +/** + * Get Asset + * Get an asset. + * @param data The data for the request. + * @param data.assetId + * @returns AssetResponse Successful Response + * @throws ApiError + */ +export const prefetchUseAssetServiceGetAsset = ( + queryClient: QueryClient, + { + assetId, + }: { + assetId: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }), + queryFn: () => AssetService.getAsset({ assetId }), + }); +/** + * Get Dag Asset Queued Events + * Get queued asset events for a DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.before + * @returns QueuedEventCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseAssetServiceGetDagAssetQueuedEvents = ( + queryClient: QueryClient, + { + before, + dagId, + }: { + before?: string; + dagId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }), + queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }), + }); +/** + * Get Dag Asset Queued Event + * Get a queued asset event for a DAG. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.assetId + * @param data.before + * @returns QueuedEventResponse Successful Response + * @throws ApiError + */ +export const prefetchUseAssetServiceGetDagAssetQueuedEvent = ( + queryClient: QueryClient, + { + assetId, + before, + dagId, + }: { + assetId: number; + before?: string; + dagId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }), + queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }), + }); +/** + * Next Run Assets + * @param data The data for the request. + * @param data.dagId + * @returns unknown Successful Response + * @throws ApiError + */ +export const prefetchUseAssetServiceNextRunAssets = ( + queryClient: QueryClient, + { + dagId, + }: { + dagId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }), + queryFn: () => AssetService.nextRunAssets({ dagId }), + }); +/** + * List Backfills + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns BackfillCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseBackfillServiceListBackfills = ( + queryClient: QueryClient, + { + dagId, + limit, + offset, + orderBy, + }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }), + queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }), + }); +/** + * Get Backfill + * @param data The data for the request. 
+ * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ +export const prefetchUseBackfillServiceGetBackfill = ( + queryClient: QueryClient, + { + backfillId, + }: { + backfillId: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }), + queryFn: () => BackfillService.getBackfill({ backfillId }), + }); +/** + * List Backfills + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.active + * @returns BackfillCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseBackfillServiceListBackfills1 = ( + queryClient: QueryClient, + { + active, + dagId, + limit, + offset, + orderBy, + }: { + active?: boolean; + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseBackfillServiceListBackfills1KeyFn({ active, dagId, limit, offset, orderBy }), + queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }), + }); +/** + * Get Connection + * Get a connection entry. + * @param data The data for the request. + * @param data.connectionId + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseConnectionServiceGetConnection = ( + queryClient: QueryClient, + { + connectionId, + }: { + connectionId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }), + queryFn: () => ConnectionService.getConnection({ connectionId }), + }); +/** + * Get Connections + * Get all connection entries. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.connectionIdPattern + * @returns ConnectionCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseConnectionServiceGetConnections = ( + queryClient: QueryClient, + { + connectionIdPattern, + limit, + offset, + orderBy, + }: { + connectionIdPattern?: string; + limit?: number; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }), + queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }), + }); +/** + * Hook Meta Data + * Retrieve information about available connection types (hook classes) and their parameters. + * @returns ConnectionHookMetaData Successful Response + * @throws ApiError + */ +export const prefetchUseConnectionServiceHookMetaData = (queryClient: QueryClient) => + queryClient.prefetchQuery({ + queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(), + queryFn: () => ConnectionService.hookMetaData(), + }); +/** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagRunServiceGetDagRun = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }), + }); +/** + * Get Upstream Asset Events + * If dag run is asset-triggered, return the asset events that triggered it. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @returns AssetEventCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagRunServiceGetUpstreamAssetEvents = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }), + queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }), + }); +/** + * Get Dag Runs + * Get all DAG Runs. + * + * This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.runType + * @param data.state + * @param data.orderBy + * @returns DAGRunCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagRunServiceGetDagRuns = ( + queryClient: QueryClient, + { + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }: { + dagId: string; + endDateGte?: string; + endDateLte?: string; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + runAfterGte?: string; + runAfterLte?: string; + runType?: string[]; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + updatedAtGte?: string; + updatedAtLte?: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ + dagId, + endDateGte, + endDateLte, + 
limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }), + queryFn: () => + DagRunService.getDagRuns({ + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }), + }); +/** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. + * @param data.dagId + * @param data.versionNumber + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagSourceServiceGetDagSource = ( + queryClient: QueryClient, + { + accept, + dagId, + versionNumber, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + dagId: string; + versionNumber?: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }), + queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }), + }); +/** + * Get Dag Stats + * Get Dag statistics. + * @param data The data for the request. + * @param data.dagIds + * @returns DagStatsCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagStatsServiceGetDagStats = ( + queryClient: QueryClient, + { + dagIds, + }: { + dagIds?: string[]; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }), + queryFn: () => DagStatsService.getDagStats({ dagIds }), + }); +/** + * Get Dag Reports + * Get DAG report. + * @param data The data for the request. 
+ * @param data.subdir + * @returns unknown Successful Response + * @throws ApiError + */ +export const prefetchUseDagReportServiceGetDagReports = ( + queryClient: QueryClient, + { + subdir, + }: { + subdir: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }), + queryFn: () => DagReportService.getDagReports({ subdir }), + }); +/** + * Get Config + * @param data The data for the request. + * @param data.section + * @param data.accept + * @returns Config Successful Response + * @throws ApiError + */ +export const prefetchUseConfigServiceGetConfig = ( + queryClient: QueryClient, + { + accept, + section, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + section?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }), + queryFn: () => ConfigService.getConfig({ accept, section }), + }); +/** + * Get Config Value + * @param data The data for the request. + * @param data.section + * @param data.option + * @param data.accept + * @returns Config Successful Response + * @throws ApiError + */ +export const prefetchUseConfigServiceGetConfigValue = ( + queryClient: QueryClient, + { + accept, + option, + section, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + option: string; + section: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }), + queryFn: () => ConfigService.getConfigValue({ accept, option, section }), + }); +/** + * Get Configs + * Get configs for UI. + * @returns ConfigResponse Successful Response + * @throws ApiError + */ +export const prefetchUseConfigServiceGetConfigs = (queryClient: QueryClient) => + queryClient.prefetchQuery({ + queryKey: Common.UseConfigServiceGetConfigsKeyFn(), + queryFn: () => ConfigService.getConfigs(), + }); +/** + * List Dag Warnings + * Get a list of DAG warnings. 
+ * @param data The data for the request. + * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagWarningServiceListDagWarnings = ( + queryClient: QueryClient, + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }), + queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }), + }); +/** + * Get Dags + * Get all DAGs. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.excludeStale + * @param data.paused + * @param data.lastDagRunState + * @param data.dagRunStartDateGte + * @param data.dagRunStartDateLte + * @param data.dagRunEndDateGte + * @param data.dagRunEndDateLte + * @param data.dagRunState + * @param data.orderBy + * @returns DAGCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagServiceGetDags = ( + queryClient: QueryClient, + { + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunEndDateGte?: string; + dagRunEndDateLte?: string; + dagRunStartDateGte?: string; + dagRunStartDateLte?: string; + dagRunState?: string[]; + excludeStale?: boolean; + lastDagRunState?: DagRunState; + limit?: 
number; + offset?: number; + orderBy?: string; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagServiceGetDagsKeyFn({ + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }), + queryFn: () => + DagService.getDags({ + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }), + }); +/** + * Get Dag + * Get basic information about a DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagServiceGetDag = ( + queryClient: QueryClient, + { + dagId, + }: { + dagId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }), + queryFn: () => DagService.getDag({ dagId }), + }); +/** + * Get Dag Details + * Get details of DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGDetailsResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagServiceGetDagDetails = ( + queryClient: QueryClient, + { + dagId, + }: { + dagId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }), + queryFn: () => DagService.getDagDetails({ dagId }), + }); +/** + * Get Dag Tags + * Get all DAG tags. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.tagNamePattern + * @returns DAGTagCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagServiceGetDagTags = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + tagNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + tagNamePattern?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }), + queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }), + }); +/** + * Recent Dag Runs + * Get recent DAG runs. + * @param data The data for the request. + * @param data.dagRunsLimit + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIds + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.excludeStale + * @param data.paused + * @param data.lastDagRunState + * @returns DAGWithLatestDagRunsCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagServiceRecentDagRuns = ( + queryClient: QueryClient, + { + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagIds?: string[]; + dagRunsLimit?: number; + excludeStale?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagServiceRecentDagRunsKeyFn({ + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }), + queryFn: () => + 
DagService.recentDagRuns({ + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }), + }); +/** + * Get Event Log + * @param data The data for the request. + * @param data.eventLogId + * @returns EventLogResponse Successful Response + * @throws ApiError + */ +export const prefetchUseEventLogServiceGetEventLog = ( + queryClient: QueryClient, + { + eventLogId, + }: { + eventLogId: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }), + queryFn: () => EventLogService.getEventLog({ eventLogId }), + }); +/** + * Get Event Logs + * Get all Event Logs. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.taskId + * @param data.runId + * @param data.mapIndex + * @param data.tryNumber + * @param data.owner + * @param data.event + * @param data.excludedEvents + * @param data.includedEvents + * @param data.before + * @param data.after + * @returns EventLogCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseEventLogServiceGetEventLogs = ( + queryClient: QueryClient, + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + 
}), + queryFn: () => + EventLogService.getEventLogs({ + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }), + }); +/** + * Get Extra Links + * Get extra links for task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns ExtraLinkCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseExtraLinksServiceGetExtraLinks = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), + queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Extra Links + * Get extra links for task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns ExtraLinkCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetExtraLinks = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }), + queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Task Instance + * Get task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetTaskInstance = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + taskId, + }: { + dagId: string; + dagRunId: string; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }), + queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }), + }); +/** + * Get Mapped Task Instances + * Get list of mapped task instances. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.durationGte + * @param data.durationLte + * @param data.state + * @param data.pool + * @param data.queue + * @param data.executor + * @param data.versionNumber + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetMappedTaskInstances = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + 
logicalDateLte?: string; + offset?: number; + orderBy?: string; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskId: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }), + queryFn: () => + TaskInstanceService.getMappedTaskInstances({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }), + }); +/** + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndex = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + queryFn: () => + TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetTaskInstanceDependencies = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Task Instance Tries + * Get list of task instances history. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetTaskInstanceTries = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }), + queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Mapped Task Instance Tries + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetMappedTaskInstanceTries = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Mapped Task Instance + * Get task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetMappedTaskInstance = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }), + queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }), + }); +/** + * Get Task Instances + * Get list of task instances. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs + * and DAG runs. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.durationGte + * @param data.durationLte + * @param data.taskDisplayNamePattern + * @param data.state + * @param data.pool + * @param data.queue + * @param data.executor + * @param data.versionNumber + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetTaskInstances = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + 
updatedAtGte, + updatedAtLte, + versionNumber, + }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskDisplayNamePattern?: string; + taskId?: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }), + queryFn: () => + TaskInstanceService.getTaskInstances({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }), + }); +/** + * Get Task Instance Try Details + * Get task instance details by try number. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.taskTryNumber + * @param data.mapIndex + * @returns TaskInstanceHistoryResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetTaskInstanceTryDetails = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + taskTryNumber: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }), + queryFn: () => + TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }), + }); +/** + * Get Mapped Task Instance Try Details + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.taskTryNumber + * @param data.mapIndex + * @returns TaskInstanceHistoryResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetMappedTaskInstanceTryDetails = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + taskTryNumber: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }), + queryFn: () => + TaskInstanceService.getMappedTaskInstanceTryDetails({ + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }), + }); +/** + * Get Log + * Get logs for a specific task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.tryNumber + * @param data.fullContent + * @param data.mapIndex + * @param data.token + * @param data.accept + * @returns TaskInstancesLogResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetLog = ( + queryClient: QueryClient, + { + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }: { + accept?: "application/json" | "*/*" | "application/x-ndjson"; + dagId: string; + dagRunId: string; + fullContent?: boolean; + mapIndex?: number; + taskId: string; + token?: string; + tryNumber: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }), + queryFn: () => + TaskInstanceService.getLog({ + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }), + }); +/** + * Get Import Error + * Get an import error. + * @param data The data for the request. + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError + */ +export const prefetchUseImportErrorServiceGetImportError = ( + queryClient: QueryClient, + { + importErrorId, + }: { + importErrorId: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }), + queryFn: () => ImportErrorService.getImportError({ importErrorId }), + }); +/** + * Get Import Errors + * Get all import errors. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseImportErrorServiceGetImportErrors = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }), + queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }), + }); +/** + * Get Jobs + * Get all jobs. + * @param data The data for the request. + * @param data.isAlive + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.jobState + * @param data.jobType + * @param data.hostname + * @param data.executorClass + * @returns JobCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseJobServiceGetJobs = ( + queryClient: QueryClient, + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }: { + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string; + startDateGte?: string; + startDateLte?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseJobServiceGetJobsKeyFn({ + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }), + queryFn: () => + JobService.getJobs({ + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }), + }); 
+/** + * Get Plugins + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUsePluginServiceGetPlugins = ( + queryClient: QueryClient, + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), + queryFn: () => PluginService.getPlugins({ limit, offset }), + }); +/** + * Get Pool + * Get a pool. + * @param data The data for the request. + * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const prefetchUsePoolServiceGetPool = ( + queryClient: QueryClient, + { + poolName, + }: { + poolName: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), + queryFn: () => PoolService.getPool({ poolName }), + }); +/** + * Get Pools + * Get all pools entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.poolNamePattern + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUsePoolServiceGetPools = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + poolNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + poolNamePattern?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }), + queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }), + }); +/** + * Get Providers + * Get providers. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseProviderServiceGetProviders = ( + queryClient: QueryClient, + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), + queryFn: () => ProviderService.getProviders({ limit, offset }), + }); +/** + * Get Xcom Entry + * Get an XCom entry. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @param data.dagRunId + * @param data.xcomKey + * @param data.mapIndex + * @param data.deserialize + * @param data.stringify + * @returns unknown Successful Response + * @throws ApiError + */ +export const prefetchUseXcomServiceGetXcomEntry = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }), + queryFn: () => + XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }), + }); +/** + * Get Xcom Entries + * Get all XCom entries. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.xcomKey + * @param data.mapIndex + * @param data.limit + * @param data.offset + * @returns XComCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseXcomServiceGetXcomEntries = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }), + queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }), + }); +/** + * Get Tasks + * Get tasks for DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.orderBy + * @returns TaskCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskServiceGetTasks = ( + queryClient: QueryClient, + { + dagId, + orderBy, + }: { + dagId: string; + orderBy?: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }), + queryFn: () => TaskService.getTasks({ dagId, orderBy }), + }); +/** + * Get Task + * Get simplified representation of a task. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @returns TaskResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskServiceGetTask = ( + queryClient: QueryClient, + { + dagId, + taskId, + }: { + dagId: string; + taskId: unknown; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }), + queryFn: () => TaskService.getTask({ dagId, taskId }), + }); +/** + * Get Variable + * Get a variable entry. + * @param data The data for the request. 
+ * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const prefetchUseVariableServiceGetVariable = ( + queryClient: QueryClient, + { + variableKey, + }: { + variableKey: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }), + queryFn: () => VariableService.getVariable({ variableKey }), + }); +/** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.variableKeyPattern + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseVariableServiceGetVariables = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + variableKeyPattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + variableKeyPattern?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }), + queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }), + }); +/** + * Get Dag Version + * Get one Dag Version. + * @param data The data for the request. + * @param data.dagId + * @param data.versionNumber + * @returns DagVersionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagVersionServiceGetDagVersion = ( + queryClient: QueryClient, + { + dagId, + versionNumber, + }: { + dagId: string; + versionNumber: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }), + queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }), + }); +/** + * Get Dag Versions + * Get all DAG Versions. + * + * This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.versionNumber + * @param data.bundleName + * @param data.bundleVersion + * @param data.orderBy + * @returns DAGVersionCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagVersionServiceGetDagVersions = ( + queryClient: QueryClient, + { + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }: { + bundleName?: string; + bundleVersion?: string; + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + versionNumber?: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }), + queryFn: () => + DagVersionService.getDagVersions({ + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }), + }); +/** + * Get Health + * @returns HealthInfoResponse Successful Response + * @throws ApiError + */ +export const prefetchUseMonitorServiceGetHealth = (queryClient: QueryClient) => + queryClient.prefetchQuery({ + queryKey: Common.UseMonitorServiceGetHealthKeyFn(), + queryFn: () => MonitorService.getHealth(), + }); +/** + * Get Version + * Get version information. + * @returns VersionInfo Successful Response + * @throws ApiError + */ +export const prefetchUseVersionServiceGetVersion = (queryClient: QueryClient) => + queryClient.prefetchQuery({ + queryKey: Common.UseVersionServiceGetVersionKeyFn(), + queryFn: () => VersionService.getVersion(), + }); +/** + * Login + * Redirect to the login URL depending on the AuthManager configured. + * @param data The data for the request. 
+ * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ +export const prefetchUseLoginServiceLogin = ( + queryClient: QueryClient, + { + next, + }: { + next?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseLoginServiceLoginKeyFn({ next }), + queryFn: () => LoginService.login({ next }), + }); +/** + * Logout + * Logout the user. + * @param data The data for the request. + * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ +export const prefetchUseLoginServiceLogout = ( + queryClient: QueryClient, + { + next, + }: { + next?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseLoginServiceLogoutKeyFn({ next }), + queryFn: () => LoginService.logout({ next }), + }); +/** + * Get Auth Menus + * @returns MenuItemCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseAuthLinksServiceGetAuthMenus = (queryClient: QueryClient) => + queryClient.prefetchQuery({ + queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(), + queryFn: () => AuthLinksService.getAuthMenus(), + }); +/** + * Get Dependencies + * Dependencies graph. + * @param data The data for the request. + * @param data.nodeId + * @returns BaseGraphResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDependenciesServiceGetDependencies = ( + queryClient: QueryClient, + { + nodeId, + }: { + nodeId?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }), + queryFn: () => DependenciesService.getDependencies({ nodeId }), + }); +/** + * Historical Metrics + * Return cluster activity historical metrics. + * @param data The data for the request. 
+ * @param data.startDate + * @param data.endDate + * @returns HistoricalMetricDataResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDashboardServiceHistoricalMetrics = ( + queryClient: QueryClient, + { + endDate, + startDate, + }: { + endDate?: string; + startDate: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }), + queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }), + }); +/** + * Dag Stats + * Return basic DAG stats with counts of DAGs in various states. + * @returns DashboardDagStatsResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDashboardServiceDagStats = (queryClient: QueryClient) => + queryClient.prefetchQuery({ + queryKey: Common.UseDashboardServiceDagStatsKeyFn(), + queryFn: () => DashboardService.dagStats(), + }); +/** + * Structure Data + * Get Structure Data. + * @param data The data for the request. + * @param data.dagId + * @param data.includeUpstream + * @param data.includeDownstream + * @param data.root + * @param data.externalDependencies + * @param data.versionNumber + * @returns StructureDataResponse Successful Response + * @throws ApiError + */ +export const prefetchUseStructureServiceStructureData = ( + queryClient: QueryClient, + { + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }: { + dagId: string; + externalDependencies?: boolean; + includeDownstream?: boolean; + includeUpstream?: boolean; + root?: string; + versionNumber?: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseStructureServiceStructureDataKeyFn({ + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }), + queryFn: () => + StructureService.structureData({ + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }), + }); +/** + * Grid Data + * Return 
grid data. + * @param data The data for the request. + * @param data.dagId + * @param data.includeUpstream + * @param data.includeDownstream + * @param data.root + * @param data.offset + * @param data.runType + * @param data.state + * @param data.limit + * @param data.orderBy + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @returns GridResponse Successful Response + * @throws ApiError + */ +export const prefetchUseGridServiceGridData = ( + queryClient: QueryClient, + { + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }: { + dagId: string; + includeDownstream?: boolean; + includeUpstream?: boolean; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + root?: string; + runAfterGte?: string; + runAfterLte?: string; + runType?: string[]; + state?: string[]; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseGridServiceGridDataKeyFn({ + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }), + queryFn: () => + GridService.gridData({ + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }), + }); diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts index 9e6f49b5f323d..30b49d52aa330 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/queries.ts @@ -1,1935 +1,4747 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 - +// generated with @7nohe/openapi-react-query-codegen@1.6.2 import { UseMutationOptions, 
UseQueryOptions, useMutation, useQuery } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, ConfigService, ConnectionService, DagParsingService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; -import { BackfillPostBody, BulkBody_ConnectionBody_, BulkBody_PoolBody_, BulkBody_VariableBody_, ClearTaskInstancesBody, ConnectionBody, CreateAssetEventsBody, DAGPatchBody, DAGRunClearBody, DAGRunPatchBody, DAGRunsBatchBody, DagRunState, DagWarningType, PatchTaskInstanceBody, PoolBody, PoolPatchBody, TaskInstancesBatchBody, TriggerDAGRunPostBody, VariableBody, XComCreateBody, XComUpdateBody } from "../requests/types.gen"; + +import { + AssetService, + AuthLinksService, + BackfillService, + ConfigService, + ConnectionService, + DagParsingService, + DagReportService, + DagRunService, + DagService, + DagSourceService, + DagStatsService, + DagVersionService, + DagWarningService, + DashboardService, + DependenciesService, + EventLogService, + ExtraLinksService, + GridService, + ImportErrorService, + JobService, + LoginService, + MonitorService, + PluginService, + PoolService, + ProviderService, + StructureService, + TaskInstanceService, + TaskService, + VariableService, + VersionService, + XcomService, +} from "../requests/services.gen"; +import { + BackfillPostBody, + BulkBody_ConnectionBody_, + BulkBody_PoolBody_, + BulkBody_VariableBody_, + ClearTaskInstancesBody, + ConnectionBody, + CreateAssetEventsBody, + DAGPatchBody, + DAGRunClearBody, + DAGRunPatchBody, + DAGRunsBatchBody, + DagRunState, + DagWarningType, + PatchTaskInstanceBody, + PoolBody, + 
PoolPatchBody, + TaskInstancesBatchBody, + TriggerDAGRunPostBody, + VariableBody, + XComCreateBody, + XComUpdateBody, +} from "../requests/types.gen"; import * as Common from "./common"; + +/** + * Get Assets + * Get assets. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.namePattern + * @param data.uriPattern + * @param data.dagIds + * @param data.onlyActive + * @param data.orderBy + * @returns AssetCollectionResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssets = < + TData = Common.AssetServiceGetAssetsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagIds, + limit, + namePattern, + offset, + onlyActive, + orderBy, + uriPattern, + }: { + dagIds?: string[]; + limit?: number; + namePattern?: string; + offset?: number; + onlyActive?: boolean; + orderBy?: string; + uriPattern?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseAssetServiceGetAssetsKeyFn( + { dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }, + queryKey, + ), + queryFn: () => + AssetService.getAssets({ + dagIds, + limit, + namePattern, + offset, + onlyActive, + orderBy, + uriPattern, + }) as TData, + ...options, + }); +/** + * Get Asset Aliases + * Get asset aliases. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.namePattern + * @param data.orderBy + * @returns AssetAliasCollectionResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssetAliases = < + TData = Common.AssetServiceGetAssetAliasesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + namePattern, + offset, + orderBy, + }: { + limit?: number; + namePattern?: string; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }, queryKey), + queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Asset Alias + * Get an asset alias. + * @param data The data for the request. + * @param data.assetAliasId + * @returns unknown Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssetAlias = < + TData = Common.AssetServiceGetAssetAliasDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + assetAliasId, + }: { + assetAliasId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }, queryKey), + queryFn: () => AssetService.getAssetAlias({ assetAliasId }) as TData, + ...options, + }); +/** + * Get Asset Events + * Get asset events. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.assetId + * @param data.sourceDagId + * @param data.sourceTaskId + * @param data.sourceRunId + * @param data.sourceMapIndex + * @param data.timestampGte + * @param data.timestampLte + * @returns AssetEventCollectionResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssetEvents = < + TData = Common.AssetServiceGetAssetEventsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }: { + assetId?: number; + limit?: number; + offset?: number; + orderBy?: string; + sourceDagId?: string; + sourceMapIndex?: number; + sourceRunId?: string; + sourceTaskId?: string; + timestampGte?: string; + timestampLte?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseAssetServiceGetAssetEventsKeyFn( + { + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }, + queryKey, + ), + queryFn: () => + AssetService.getAssetEvents({ + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }) as TData, + ...options, + }); +/** + * Get Asset Queued Events + * Get queued asset events for an asset. + * @param data The data for the request. 
+ * @param data.assetId + * @param data.before + * @returns QueuedEventCollectionResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssetQueuedEvents = < + TData = Common.AssetServiceGetAssetQueuedEventsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + assetId, + before, + }: { + assetId: number; + before?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }, queryKey), + queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) as TData, + ...options, + }); +/** + * Get Asset + * Get an asset. + * @param data The data for the request. + * @param data.assetId + * @returns AssetResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAsset = < + TData = Common.AssetServiceGetAssetDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + assetId, + }: { + assetId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }, queryKey), + queryFn: () => AssetService.getAsset({ assetId }) as TData, + ...options, + }); +/** + * Get Dag Asset Queued Events + * Get queued asset events for a DAG. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.before + * @returns QueuedEventCollectionResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetDagAssetQueuedEvents = < + TData = Common.AssetServiceGetDagAssetQueuedEventsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + before, + dagId, + }: { + before?: string; + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }, queryKey), + queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) as TData, + ...options, + }); +/** + * Get Dag Asset Queued Event + * Get a queued asset event for a DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.assetId + * @param data.before + * @returns QueuedEventResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetDagAssetQueuedEvent = < + TData = Common.AssetServiceGetDagAssetQueuedEventDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + assetId, + before, + dagId, + }: { + assetId: number; + before?: string; + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }, queryKey), + queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) as TData, + ...options, + }); +/** + * Next Run Assets + * @param data The data for the request. 
+ * @param data.dagId + * @returns unknown Successful Response + * @throws ApiError + */ +export const useAssetServiceNextRunAssets = < + TData = Common.AssetServiceNextRunAssetsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + }: { + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }, queryKey), + queryFn: () => AssetService.nextRunAssets({ dagId }) as TData, + ...options, + }); +/** + * List Backfills + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns BackfillCollectionResponse Successful Response + * @throws ApiError + */ +export const useBackfillServiceListBackfills = < + TData = Common.BackfillServiceListBackfillsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + limit, + offset, + orderBy, + }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }, queryKey), + queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Backfill + * @param data The data for the request. 
+ * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ +export const useBackfillServiceGetBackfill = < + TData = Common.BackfillServiceGetBackfillDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + backfillId, + }: { + backfillId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }, queryKey), + queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, + ...options, + }); +/** + * List Backfills + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.active + * @returns BackfillCollectionResponse Successful Response + * @throws ApiError + */ +export const useBackfillServiceListBackfills1 = < + TData = Common.BackfillServiceListBackfills1DefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + active, + dagId, + limit, + offset, + orderBy, + }: { + active?: boolean; + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseBackfillServiceListBackfills1KeyFn( + { active, dagId, limit, offset, orderBy }, + queryKey, + ), + queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Connection + * Get a connection entry. + * @param data The data for the request. 
+ * @param data.connectionId + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServiceGetConnection = < + TData = Common.ConnectionServiceGetConnectionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + connectionId, + }: { + connectionId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }, queryKey), + queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, + ...options, + }); +/** + * Get Connections + * Get all connection entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.connectionIdPattern + * @returns ConnectionCollectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServiceGetConnections = < + TData = Common.ConnectionServiceGetConnectionsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + connectionIdPattern, + limit, + offset, + orderBy, + }: { + connectionIdPattern?: string; + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseConnectionServiceGetConnectionsKeyFn( + { connectionIdPattern, limit, offset, orderBy }, + queryKey, + ), + queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Hook Meta Data + * Retrieve information about available connection types (hook classes) and their parameters. 
+ * @returns ConnectionHookMetaData Successful Response + * @throws ApiError + */ +export const useConnectionServiceHookMetaData = < + TData = Common.ConnectionServiceHookMetaDataDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(queryKey), + queryFn: () => ConnectionService.hookMetaData() as TData, + ...options, + }); +/** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetDagRun = < + TData = Common.DagRunServiceGetDagRunDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }, queryKey), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + ...options, + }); +/** + * Get Upstream Asset Events + * If dag run is asset-triggered, return the asset events that triggered it. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @returns AssetEventCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetUpstreamAssetEvents = < + TData = Common.DagRunServiceGetUpstreamAssetEventsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }, queryKey), + queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) as TData, + ...options, + }); +/** + * Get Dag Runs + * Get all DAG Runs. + * + * This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.runType + * @param data.state + * @param data.orderBy + * @returns DAGRunCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetDagRuns = < + TData = Common.DagRunServiceGetDagRunsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }: { + dagId: string; + endDateGte?: string; + endDateLte?: string; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + runAfterGte?: string; + runAfterLte?: string; + runType?: 
string[]; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + updatedAtGte?: string; + updatedAtLte?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagRunServiceGetDagRunsKeyFn( + { + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }, + queryKey, + ), + queryFn: () => + DagRunService.getDagRuns({ + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }) as TData, + ...options, + }); +/** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. + * @param data.dagId + * @param data.versionNumber + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ +export const useDagSourceServiceGetDagSource = < + TData = Common.DagSourceServiceGetDagSourceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + dagId, + versionNumber, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + dagId: string; + versionNumber?: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }, queryKey), + queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) as TData, + ...options, + }); +/** + * Get Dag Stats + * Get Dag statistics. + * @param data The data for the request. 
+ * @param data.dagIds + * @returns DagStatsCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagStatsServiceGetDagStats = < + TData = Common.DagStatsServiceGetDagStatsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagIds, + }: { + dagIds?: string[]; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }, queryKey), + queryFn: () => DagStatsService.getDagStats({ dagIds }) as TData, + ...options, + }); +/** + * Get Dag Reports + * Get DAG report. + * @param data The data for the request. + * @param data.subdir + * @returns unknown Successful Response + * @throws ApiError + */ +export const useDagReportServiceGetDagReports = < + TData = Common.DagReportServiceGetDagReportsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + subdir, + }: { + subdir: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }, queryKey), + queryFn: () => DagReportService.getDagReports({ subdir }) as TData, + ...options, + }); +/** + * Get Config + * @param data The data for the request. 
+ * @param data.section + * @param data.accept + * @returns Config Successful Response + * @throws ApiError + */ +export const useConfigServiceGetConfig = < + TData = Common.ConfigServiceGetConfigDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + section, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + section?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }, queryKey), + queryFn: () => ConfigService.getConfig({ accept, section }) as TData, + ...options, + }); +/** + * Get Config Value + * @param data The data for the request. + * @param data.section + * @param data.option + * @param data.accept + * @returns Config Successful Response + * @throws ApiError + */ +export const useConfigServiceGetConfigValue = < + TData = Common.ConfigServiceGetConfigValueDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + option, + section, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + option: string; + section: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }, queryKey), + queryFn: () => ConfigService.getConfigValue({ accept, option, section }) as TData, + ...options, + }); +/** + * Get Configs + * Get configs for UI. 
+ * @returns ConfigResponse Successful Response + * @throws ApiError + */ +export const useConfigServiceGetConfigs = < + TData = Common.ConfigServiceGetConfigsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseConfigServiceGetConfigsKeyFn(queryKey), + queryFn: () => ConfigService.getConfigs() as TData, + ...options, + }); +/** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. + * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagWarningServiceListDagWarnings = < + TData = Common.DagWarningServiceListDagWarningsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn( + { dagId, limit, offset, orderBy, warningType }, + queryKey, + ), + queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) as TData, + ...options, + }); +/** + * Get Dags + * Get all DAGs. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.excludeStale + * @param data.paused + * @param data.lastDagRunState + * @param data.dagRunStartDateGte + * @param data.dagRunStartDateLte + * @param data.dagRunEndDateGte + * @param data.dagRunEndDateLte + * @param data.dagRunState + * @param data.orderBy + * @returns DAGCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDags = < + TData = Common.DagServiceGetDagsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunEndDateGte?: string; + dagRunEndDateLte?: string; + dagRunStartDateGte?: string; + dagRunStartDateLte?: string; + dagRunState?: string[]; + excludeStale?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + orderBy?: string; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagServiceGetDagsKeyFn( + { + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }, + queryKey, + ), + queryFn: () => + DagService.getDags({ + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, 
+ limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }) as TData, + ...options, + }); +/** + * Get Dag + * Get basic information about a DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDag = < + TData = Common.DagServiceGetDagDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + }: { + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }, queryKey), + queryFn: () => DagService.getDag({ dagId }) as TData, + ...options, + }); +/** + * Get Dag Details + * Get details of DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGDetailsResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDagDetails = < + TData = Common.DagServiceGetDagDetailsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + }: { + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }, queryKey), + queryFn: () => DagService.getDagDetails({ dagId }) as TData, + ...options, + }); +/** + * Get Dag Tags + * Get all DAG tags. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.tagNamePattern + * @returns DAGTagCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDagTags = < + TData = Common.DagServiceGetDagTagsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + tagNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + tagNamePattern?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }, queryKey), + queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) as TData, + ...options, + }); +/** + * Recent Dag Runs + * Get recent DAG runs. + * @param data The data for the request. + * @param data.dagRunsLimit + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIds + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.excludeStale + * @param data.paused + * @param data.lastDagRunState + * @returns DAGWithLatestDagRunsCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagServiceRecentDagRuns = < + TData = Common.DagServiceRecentDagRunsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagIds?: string[]; + dagRunsLimit?: number; + excludeStale?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + } = {}, + queryKey?: TQueryKey, + options?: Omit, 
"queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagServiceRecentDagRunsKeyFn( + { + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }, + queryKey, + ), + queryFn: () => + DagService.recentDagRuns({ + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }) as TData, + ...options, + }); +/** + * Get Event Log + * @param data The data for the request. + * @param data.eventLogId + * @returns EventLogResponse Successful Response + * @throws ApiError + */ +export const useEventLogServiceGetEventLog = < + TData = Common.EventLogServiceGetEventLogDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + eventLogId, + }: { + eventLogId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }, queryKey), + queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, + ...options, + }); +/** + * Get Event Logs + * Get all Event Logs. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.taskId + * @param data.runId + * @param data.mapIndex + * @param data.tryNumber + * @param data.owner + * @param data.event + * @param data.excludedEvents + * @param data.includedEvents + * @param data.before + * @param data.after + * @returns EventLogCollectionResponse Successful Response + * @throws ApiError + */ +export const useEventLogServiceGetEventLogs = < + TData = Common.EventLogServiceGetEventLogsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseEventLogServiceGetEventLogsKeyFn( + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }, + queryKey, + ), + queryFn: () => + EventLogService.getEventLogs({ + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }) as TData, + ...options, + }); +/** + * Get Extra Links + * Get extra links for task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns ExtraLinkCollectionResponse Successful Response + * @throws ApiError + */ +export const useExtraLinksServiceGetExtraLinks = < + TData = Common.ExtraLinksServiceGetExtraLinksDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), + queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Extra Links + * Get extra links for task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns ExtraLinkCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetExtraLinks = < + TData = Common.TaskInstanceServiceGetExtraLinksDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Task Instance + * Get task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstance = < + TData = Common.TaskInstanceServiceGetTaskInstanceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + taskId, + }: { + dagId: string; + dagRunId: string; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }, queryKey), + queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) as TData, + ...options, + }); +/** + * Get Mapped Task Instances + * Get list of mapped task instances. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.durationGte + * @param data.durationLte + * @param data.state + * @param data.pool + * @param data.queue + * @param data.executor + * @param data.versionNumber + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstances = < + TData = Common.TaskInstanceServiceGetMappedTaskInstancesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + 
taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskId: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn( + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getMappedTaskInstances({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }) as TData, + ...options, + }); +/** + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndex = < + TData = Common.TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ + dagId, + dagRunId, + mapIndex, + taskId, + }) as TData, + ...options, + }); +/** + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceDependencies = < + TData = Common.TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Task Instance Tries + * Get list of task instances history. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceTries = < + TData = Common.TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Mapped Task Instance Tries + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstanceTries = < + TData = Common.TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Mapped Task Instance + * Get task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstance = < + TData = Common.TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Task Instances + * Get list of task instances. 
+ * + * This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs + * and DAG runs. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.durationGte + * @param data.durationLte + * @param data.taskDisplayNamePattern + * @param data.state + * @param data.pool + * @param data.queue + * @param data.executor + * @param data.versionNumber + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstances = < + TData = Common.TaskInstanceServiceGetTaskInstancesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskDisplayNamePattern?: string; + taskId?: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; + }, + 
queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn( + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstances({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }) as TData, + ...options, + }); +/** + * Get Task Instance Try Details + * Get task instance details by try number. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.taskTryNumber + * @param data.mapIndex + * @returns TaskInstanceHistoryResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceTryDetails = < + TData = Common.TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + taskTryNumber: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn( + { dagId, dagRunId, mapIndex, taskId, taskTryNumber }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstanceTryDetails({ + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }) as TData, + ...options, + }); +/** + * Get Mapped Task Instance Try Details + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.taskTryNumber + * @param data.mapIndex + * @returns TaskInstanceHistoryResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstanceTryDetails = < + TData = Common.TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + taskTryNumber: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn( + { dagId, dagRunId, mapIndex, taskId, taskTryNumber }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getMappedTaskInstanceTryDetails({ + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }) as TData, + ...options, + }); +/** + * Get Log + * Get logs for a specific task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.tryNumber + * @param data.fullContent + * @param data.mapIndex + * @param data.token + * @param data.accept + * @returns TaskInstancesLogResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetLog = < + TData = Common.TaskInstanceServiceGetLogDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }: { + accept?: "application/json" | "*/*" | "application/x-ndjson"; + dagId: string; + dagRunId: string; + fullContent?: boolean; + mapIndex?: number; + taskId: string; + token?: string; + tryNumber: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetLogKeyFn( + { accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getLog({ + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }) as TData, + ...options, + }); +/** + * Get Import Error + * Get an import error. + * @param data The data for the request. + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError + */ +export const useImportErrorServiceGetImportError = < + TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + importErrorId, + }: { + importErrorId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }, queryKey), + queryFn: () => ImportErrorService.getImportError({ importErrorId }) as TData, + ...options, + }); +/** + * Get Import Errors + * Get all import errors. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ +export const useImportErrorServiceGetImportErrors = < + TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }, queryKey), + queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Jobs + * Get all jobs. + * @param data The data for the request. + * @param data.isAlive + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.jobState + * @param data.jobType + * @param data.hostname + * @param data.executorClass + * @returns JobCollectionResponse Successful Response + * @throws ApiError + */ +export const useJobServiceGetJobs = < + TData = Common.JobServiceGetJobsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }: { + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string; + startDateGte?: string; + startDateLte?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseJobServiceGetJobsKeyFn( + { + endDateGte, + endDateLte, + executorClass, + hostname, + 
isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }, + queryKey, + ), + queryFn: () => + JobService.getJobs({ + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }) as TData, + ...options, + }); +/** + * Get Plugins + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ +export const usePluginServiceGetPlugins = < + TData = Common.PluginServiceGetPluginsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }, queryKey), + queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, + ...options, + }); +/** + * Get Pool + * Get a pool. + * @param data The data for the request. + * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceGetPool = < + TData = Common.PoolServiceGetPoolDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + poolName, + }: { + poolName: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), + queryFn: () => PoolService.getPool({ poolName }) as TData, + ...options, + }); +/** + * Get Pools + * Get all pools entries. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.poolNamePattern + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceGetPools = < + TData = Common.PoolServiceGetPoolsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + poolNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + poolNamePattern?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }, queryKey), + queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) as TData, + ...options, + }); +/** + * Get Providers + * Get providers. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ +export const useProviderServiceGetProviders = < + TData = Common.ProviderServiceGetProvidersDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }, queryKey), + queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, + ...options, + }); +/** + * Get Xcom Entry + * Get an XCom entry. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.taskId + * @param data.dagRunId + * @param data.xcomKey + * @param data.mapIndex + * @param data.deserialize + * @param data.stringify + * @returns unknown Successful Response + * @throws ApiError + */ +export const useXcomServiceGetXcomEntry = < + TData = Common.XcomServiceGetXcomEntryDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseXcomServiceGetXcomEntryKeyFn( + { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, + queryKey, + ), + queryFn: () => + XcomService.getXcomEntry({ + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }) as TData, + ...options, + }); +/** + * Get Xcom Entries + * Get all XCom entries. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.xcomKey + * @param data.mapIndex + * @param data.limit + * @param data.offset + * @returns XComCollectionResponse Successful Response + * @throws ApiError + */ +export const useXcomServiceGetXcomEntries = < + TData = Common.XcomServiceGetXcomEntriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn( + { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, + queryKey, + ), + queryFn: () => + XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) as TData, + ...options, + }); +/** + * Get Tasks + * Get tasks for DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.orderBy + * @returns TaskCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskServiceGetTasks = < + TData = Common.TaskServiceGetTasksDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + orderBy, + }: { + dagId: string; + orderBy?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), + queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, + ...options, + }); +/** + * Get Task + * Get simplified representation of a task. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.taskId + * @returns TaskResponse Successful Response + * @throws ApiError + */ +export const useTaskServiceGetTask = < + TData = Common.TaskServiceGetTaskDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + taskId, + }: { + dagId: string; + taskId: unknown; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey), + queryFn: () => TaskService.getTask({ dagId, taskId }) as TData, + ...options, + }); +/** + * Get Variable + * Get a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariable = < + TData = Common.VariableServiceGetVariableDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + variableKey, + }: { + variableKey: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }, queryKey), + queryFn: () => VariableService.getVariable({ variableKey }) as TData, + ...options, + }); +/** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.variableKeyPattern + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariables = < + TData = Common.VariableServiceGetVariablesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + variableKeyPattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + variableKeyPattern?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseVariableServiceGetVariablesKeyFn( + { limit, offset, orderBy, variableKeyPattern }, + queryKey, + ), + queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) as TData, + ...options, + }); +/** + * Get Dag Version + * Get one Dag Version. + * @param data The data for the request. + * @param data.dagId + * @param data.versionNumber + * @returns DagVersionResponse Successful Response + * @throws ApiError + */ +export const useDagVersionServiceGetDagVersion = < + TData = Common.DagVersionServiceGetDagVersionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + versionNumber, + }: { + dagId: string; + versionNumber: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }, queryKey), + queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) as TData, + ...options, + }); +/** + * Get Dag Versions + * Get all DAG Versions. + * + * This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.versionNumber + * @param data.bundleName + * @param data.bundleVersion + * @param data.orderBy + * @returns DAGVersionCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagVersionServiceGetDagVersions = < + TData = Common.DagVersionServiceGetDagVersionsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }: { + bundleName?: string; + bundleVersion?: string; + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + versionNumber?: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn( + { bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, + queryKey, + ), + queryFn: () => + DagVersionService.getDagVersions({ + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }) as TData, + ...options, + }); +/** + * Get Health + * @returns HealthInfoResponse Successful Response + * @throws ApiError + */ +export const useMonitorServiceGetHealth = < + TData = Common.MonitorServiceGetHealthDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), + queryFn: () => MonitorService.getHealth() as TData, + ...options, + }); +/** + * Get Version + * Get version information. 
+ * @returns VersionInfo Successful Response + * @throws ApiError + */ +export const useVersionServiceGetVersion = < + TData = Common.VersionServiceGetVersionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), + queryFn: () => VersionService.getVersion() as TData, + ...options, + }); +/** + * Login + * Redirect to the login URL depending on the AuthManager configured. + * @param data The data for the request. + * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ +export const useLoginServiceLogin = < + TData = Common.LoginServiceLoginDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + next, + }: { + next?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseLoginServiceLoginKeyFn({ next }, queryKey), + queryFn: () => LoginService.login({ next }) as TData, + ...options, + }); +/** + * Logout + * Logout the user. + * @param data The data for the request. 
+ * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ +export const useLoginServiceLogout = < + TData = Common.LoginServiceLogoutDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + next, + }: { + next?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseLoginServiceLogoutKeyFn({ next }, queryKey), + queryFn: () => LoginService.logout({ next }) as TData, + ...options, + }); +/** + * Get Auth Menus + * @returns MenuItemCollectionResponse Successful Response + * @throws ApiError + */ +export const useAuthLinksServiceGetAuthMenus = < + TData = Common.AuthLinksServiceGetAuthMenusDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(queryKey), + queryFn: () => AuthLinksService.getAuthMenus() as TData, + ...options, + }); +/** + * Get Dependencies + * Dependencies graph. + * @param data The data for the request. + * @param data.nodeId + * @returns BaseGraphResponse Successful Response + * @throws ApiError + */ +export const useDependenciesServiceGetDependencies = < + TData = Common.DependenciesServiceGetDependenciesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + nodeId, + }: { + nodeId?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }, queryKey), + queryFn: () => DependenciesService.getDependencies({ nodeId }) as TData, + ...options, + }); +/** + * Historical Metrics + * Return cluster activity historical metrics. + * @param data The data for the request. 
+ * @param data.startDate + * @param data.endDate + * @returns HistoricalMetricDataResponse Successful Response + * @throws ApiError + */ +export const useDashboardServiceHistoricalMetrics = < + TData = Common.DashboardServiceHistoricalMetricsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + endDate, + startDate, + }: { + endDate?: string; + startDate: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }, queryKey), + queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) as TData, + ...options, + }); +/** + * Dag Stats + * Return basic DAG stats with counts of DAGs in various states. + * @returns DashboardDagStatsResponse Successful Response + * @throws ApiError + */ +export const useDashboardServiceDagStats = < + TData = Common.DashboardServiceDagStatsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDashboardServiceDagStatsKeyFn(queryKey), + queryFn: () => DashboardService.dagStats() as TData, + ...options, + }); +/** + * Structure Data + * Get Structure Data. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.includeUpstream + * @param data.includeDownstream + * @param data.root + * @param data.externalDependencies + * @param data.versionNumber + * @returns StructureDataResponse Successful Response + * @throws ApiError + */ +export const useStructureServiceStructureData = < + TData = Common.StructureServiceStructureDataDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }: { + dagId: string; + externalDependencies?: boolean; + includeDownstream?: boolean; + includeUpstream?: boolean; + root?: string; + versionNumber?: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseStructureServiceStructureDataKeyFn( + { dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }, + queryKey, + ), + queryFn: () => + StructureService.structureData({ + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }) as TData, + ...options, + }); +/** + * Grid Data + * Return grid data. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.includeUpstream + * @param data.includeDownstream + * @param data.root + * @param data.offset + * @param data.runType + * @param data.state + * @param data.limit + * @param data.orderBy + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @returns GridResponse Successful Response + * @throws ApiError + */ +export const useGridServiceGridData = < + TData = Common.GridServiceGridDataDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }: { + dagId: string; + includeDownstream?: boolean; + includeUpstream?: boolean; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + root?: string; + runAfterGte?: string; + runAfterLte?: string; + runType?: string[]; + state?: string[]; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseGridServiceGridDataKeyFn( + { + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }, + queryKey, + ), + queryFn: () => + GridService.gridData({ + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }) as TData, + ...options, + }); +/** + * Create Asset Event + * Create asset events. + * @param data The data for the request. 
+ * @param data.requestBody + * @returns AssetEventResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceCreateAssetEvent = < + TData = Common.AssetServiceCreateAssetEventMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: CreateAssetEventsBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: CreateAssetEventsBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + AssetService.createAssetEvent({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Materialize Asset + * Materialize an asset by triggering a DAG run that produces it. + * @param data The data for the request. + * @param data.assetId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceMaterializeAsset = < + TData = Common.AssetServiceMaterializeAssetMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + assetId: number; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + assetId: number; + }, + TContext + >({ + mutationFn: ({ assetId }) => AssetService.materializeAsset({ assetId }) as unknown as Promise, + ...options, + }); +/** + * Create Backfill + * @param data The data for the request. 
+ * @param data.requestBody + * @returns BackfillResponse Successful Response + * @throws ApiError + */ +export const useBackfillServiceCreateBackfill = < + TData = Common.BackfillServiceCreateBackfillMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: BackfillPostBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: BackfillPostBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + BackfillService.createBackfill({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Create Backfill Dry Run + * @param data The data for the request. + * @param data.requestBody + * @returns DryRunBackfillCollectionResponse Successful Response + * @throws ApiError + */ +export const useBackfillServiceCreateBackfillDryRun = < + TData = Common.BackfillServiceCreateBackfillDryRunMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: BackfillPostBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: BackfillPostBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + BackfillService.createBackfillDryRun({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Post Connection + * Create connection entry. + * @param data The data for the request. 
+ * @param data.requestBody + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServicePostConnection = < + TData = Common.ConnectionServicePostConnectionMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: ConnectionBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: ConnectionBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + ConnectionService.postConnection({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Test Connection + * Test an API connection. + * + * This method first creates an in-memory transient conn_id & exports that to an env var, + * as some hook classes tries to find out the `conn` from their __init__ method & errors out if not found. + * It also deletes the conn id env connection after the test. + * @param data The data for the request. + * @param data.requestBody + * @returns ConnectionTestResponse Successful Response + * @throws ApiError + */ +export const useConnectionServiceTestConnection = < + TData = Common.ConnectionServiceTestConnectionMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: ConnectionBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: ConnectionBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + ConnectionService.testConnection({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Create Default Connections + * Create default connections. 
+ * @returns void Successful Response + * @throws ApiError + */ +export const useConnectionServiceCreateDefaultConnections = < + TData = Common.ConnectionServiceCreateDefaultConnectionsMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit, "mutationFn">, +) => + useMutation({ + mutationFn: () => ConnectionService.createDefaultConnections() as unknown as Promise, + ...options, + }); +/** + * Clear Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @returns unknown Successful Response + * @throws ApiError + */ +export const useDagRunServiceClearDagRun = < + TData = Common.DagRunServiceClearDagRunMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: DAGRunClearBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: DAGRunClearBody; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, requestBody }) => + DagRunService.clearDagRun({ dagId, dagRunId, requestBody }) as unknown as Promise, + ...options, + }); +/** + * Trigger Dag Run + * Trigger a DAG. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.requestBody + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceTriggerDagRun = < + TData = Common.DagRunServiceTriggerDagRunMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: unknown; + requestBody: TriggerDAGRunPostBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: unknown; + requestBody: TriggerDAGRunPostBody; + }, + TContext + >({ + mutationFn: ({ dagId, requestBody }) => + DagRunService.triggerDagRun({ dagId, requestBody }) as unknown as Promise, + ...options, + }); +/** + * Get List Dag Runs Batch + * Get a list of DAG Runs. + * @param data The data for the request. + * @param data.dagId + * @param data.requestBody + * @returns DAGRunCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetListDagRunsBatch = < + TData = Common.DagRunServiceGetListDagRunsBatchMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: "~"; + requestBody: DAGRunsBatchBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: "~"; + requestBody: DAGRunsBatchBody; + }, + TContext + >({ + mutationFn: ({ dagId, requestBody }) => + DagRunService.getListDagRunsBatch({ dagId, requestBody }) as unknown as Promise, + ...options, + }); +/** + * Get Task Instances Batch + * Get list of task instances. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstancesBatch = < + TData = Common.TaskInstanceServiceGetTaskInstancesBatchMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: "~"; + dagRunId: "~"; + requestBody: TaskInstancesBatchBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: "~"; + dagRunId: "~"; + requestBody: TaskInstancesBatchBody; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, requestBody }) => + TaskInstanceService.getTaskInstancesBatch({ + dagId, + dagRunId, + requestBody, + }) as unknown as Promise, + ...options, + }); +/** + * Post Clear Task Instances + * Clear task instances. + * @param data The data for the request. + * @param data.dagId + * @param data.requestBody + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServicePostClearTaskInstances = < + TData = Common.TaskInstanceServicePostClearTaskInstancesMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + requestBody: ClearTaskInstancesBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + requestBody: ClearTaskInstancesBody; + }, + TContext + >({ + mutationFn: ({ dagId, requestBody }) => + TaskInstanceService.postClearTaskInstances({ dagId, requestBody }) as unknown as Promise, + ...options, + }); +/** + * Post Pool + * Create a Pool. + * @param data The data for the request. 
+ * @param data.requestBody + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServicePostPool = < + TData = Common.PoolServicePostPoolMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: PoolBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: PoolBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => PoolService.postPool({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Create Xcom Entry + * Create an XCom entry. + * @param data The data for the request. + * @param data.dagId + * @param data.taskId + * @param data.dagRunId + * @param data.requestBody + * @returns XComResponseNative Successful Response + * @throws ApiError + */ +export const useXcomServiceCreateXcomEntry = < + TData = Common.XcomServiceCreateXcomEntryMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: XComCreateBody; + taskId: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: XComCreateBody; + taskId: string; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, requestBody, taskId }) => + XcomService.createXcomEntry({ dagId, dagRunId, requestBody, taskId }) as unknown as Promise, + ...options, + }); +/** + * Post Variable + * Create a variable. + * @param data The data for the request. 
+ * @param data.requestBody + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServicePostVariable = < + TData = Common.VariableServicePostVariableMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: VariableBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: VariableBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + VariableService.postVariable({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Pause Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ +export const useBackfillServicePauseBackfill = < + TData = Common.BackfillServicePauseBackfillMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + backfillId: number; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + backfillId: number; + }, + TContext + >({ + mutationFn: ({ backfillId }) => + BackfillService.pauseBackfill({ backfillId }) as unknown as Promise, + ...options, + }); +/** + * Unpause Backfill + * @param data The data for the request. 
+ * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ +export const useBackfillServiceUnpauseBackfill = < + TData = Common.BackfillServiceUnpauseBackfillMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + backfillId: number; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + backfillId: number; + }, + TContext + >({ + mutationFn: ({ backfillId }) => + BackfillService.unpauseBackfill({ backfillId }) as unknown as Promise, + ...options, + }); +/** + * Cancel Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ +export const useBackfillServiceCancelBackfill = < + TData = Common.BackfillServiceCancelBackfillMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + backfillId: number; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + backfillId: number; + }, + TContext + >({ + mutationFn: ({ backfillId }) => + BackfillService.cancelBackfill({ backfillId }) as unknown as Promise, + ...options, + }); +/** + * Reparse Dag File + * Request re-parsing a DAG file. + * @param data The data for the request. 
+ * @param data.fileToken + * @returns null Successful Response + * @throws ApiError + */ +export const useDagParsingServiceReparseDagFile = < + TData = Common.DagParsingServiceReparseDagFileMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + fileToken: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + fileToken: string; + }, + TContext + >({ + mutationFn: ({ fileToken }) => + DagParsingService.reparseDagFile({ fileToken }) as unknown as Promise, + ...options, + }); +/** + * Patch Connection + * Update a connection entry. + * @param data The data for the request. + * @param data.connectionId + * @param data.requestBody + * @param data.updateMask + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServicePatchConnection = < + TData = Common.ConnectionServicePatchConnectionMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + connectionId: string; + requestBody: ConnectionBody; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + connectionId: string; + requestBody: ConnectionBody; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ connectionId, requestBody, updateMask }) => + ConnectionService.patchConnection({ + connectionId, + requestBody, + updateMask, + }) as unknown as Promise, + ...options, + }); +/** + * Bulk Connections + * Bulk create, update, and delete connections. + * @param data The data for the request. 
+ * @param data.requestBody + * @returns BulkResponse Successful Response + * @throws ApiError + */ +export const useConnectionServiceBulkConnections = < + TData = Common.ConnectionServiceBulkConnectionsMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: BulkBody_ConnectionBody_; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: BulkBody_ConnectionBody_; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + ConnectionService.bulkConnections({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Patch Dag Run + * Modify a DAG Run. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @param data.updateMask + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServicePatchDagRun = < + TData = Common.DagRunServicePatchDagRunMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: DAGRunPatchBody; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: DAGRunPatchBody; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, requestBody, updateMask }) => + DagRunService.patchDagRun({ dagId, dagRunId, requestBody, updateMask }) as unknown as Promise, + ...options, + }); +/** + * Patch Dags + * Patch multiple DAGs. + * @param data The data for the request. 
+ * @param data.requestBody + * @param data.updateMask + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIdPattern + * @param data.excludeStale + * @param data.paused + * @returns DAGCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagServicePatchDags = < + TData = Common.DagServicePatchDagsMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagIdPattern?: string; + excludeStale?: boolean; + limit?: number; + offset?: number; + owners?: string[]; + paused?: boolean; + requestBody: DAGPatchBody; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagIdPattern?: string; + excludeStale?: boolean; + limit?: number; + offset?: number; + owners?: string[]; + paused?: boolean; + requestBody: DAGPatchBody; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ + dagIdPattern, + excludeStale, + limit, + offset, + owners, + paused, + requestBody, + tags, + tagsMatchMode, + updateMask, + }) => + DagService.patchDags({ + dagIdPattern, + excludeStale, + limit, + offset, + owners, + paused, + requestBody, + tags, + tagsMatchMode, + updateMask, + }) as unknown as Promise, + ...options, + }); +/** + * Patch Dag + * Patch the specific DAG. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.requestBody + * @param data.updateMask + * @returns DAGResponse Successful Response + * @throws ApiError + */ +export const useDagServicePatchDag = < + TData = Common.DagServicePatchDagMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + requestBody: DAGPatchBody; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + requestBody: DAGPatchBody; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ dagId, requestBody, updateMask }) => + DagService.patchDag({ dagId, requestBody, updateMask }) as unknown as Promise, + ...options, + }); +/** + * Patch Task Instance + * Update a task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.requestBody + * @param data.mapIndex + * @param data.updateMask + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServicePatchTaskInstance = < + TData = Common.TaskInstanceServicePatchTaskInstanceMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex?: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex?: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => + TaskInstanceService.patchTaskInstance({ + dagId, + dagRunId, + mapIndex, + requestBody, + taskId, + updateMask, + }) as unknown as Promise, + ...options, + }); +/** + * Patch Task Instance + * Update a 
task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @param data.requestBody + * @param data.updateMask + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServicePatchTaskInstanceByMapIndex = < + TData = Common.TaskInstanceServicePatchTaskInstanceByMapIndexMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => + TaskInstanceService.patchTaskInstanceByMapIndex({ + dagId, + dagRunId, + mapIndex, + requestBody, + taskId, + updateMask, + }) as unknown as Promise, + ...options, + }); +/** + * Patch Task Instance Dry Run + * Update a task instance dry_run mode. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @param data.requestBody + * @param data.updateMask + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServicePatchTaskInstanceDryRunByMapIndex = < + TData = Common.TaskInstanceServicePatchTaskInstanceDryRunByMapIndexMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => + TaskInstanceService.patchTaskInstanceDryRunByMapIndex({ + dagId, + dagRunId, + mapIndex, + requestBody, + taskId, + updateMask, + }) as unknown as Promise, + ...options, + }); +/** + * Patch Task Instance Dry Run + * Update a task instance dry_run mode. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.requestBody + * @param data.mapIndex + * @param data.updateMask + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServicePatchTaskInstanceDryRun = < + TData = Common.TaskInstanceServicePatchTaskInstanceDryRunMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex?: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + mapIndex?: number; + requestBody: PatchTaskInstanceBody; + taskId: string; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => + TaskInstanceService.patchTaskInstanceDryRun({ + dagId, + dagRunId, + mapIndex, + requestBody, + taskId, + updateMask, + }) as unknown as Promise, + ...options, + }); +/** + * Patch Pool + * Update a Pool. + * @param data The data for the request. 
+ * @param data.poolName + * @param data.requestBody + * @param data.updateMask + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServicePatchPool = < + TData = Common.PoolServicePatchPoolMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + poolName: string; + requestBody: PoolPatchBody; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + poolName: string; + requestBody: PoolPatchBody; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ poolName, requestBody, updateMask }) => + PoolService.patchPool({ poolName, requestBody, updateMask }) as unknown as Promise, + ...options, + }); +/** + * Bulk Pools + * Bulk create, update, and delete pools. + * @param data The data for the request. + * @param data.requestBody + * @returns BulkResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceBulkPools = < + TData = Common.PoolServiceBulkPoolsMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: BulkBody_PoolBody_; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: BulkBody_PoolBody_; + }, + TContext + >({ + mutationFn: ({ requestBody }) => PoolService.bulkPools({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Update Xcom Entry + * Update an existing XCom entry. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.taskId + * @param data.dagRunId + * @param data.xcomKey + * @param data.requestBody + * @returns XComResponseNative Successful Response + * @throws ApiError + */ +export const useXcomServiceUpdateXcomEntry = < + TData = Common.XcomServiceUpdateXcomEntryMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: XComUpdateBody; + taskId: string; + xcomKey: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: XComUpdateBody; + taskId: string; + xcomKey: string; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, requestBody, taskId, xcomKey }) => + XcomService.updateXcomEntry({ + dagId, + dagRunId, + requestBody, + taskId, + xcomKey, + }) as unknown as Promise, + ...options, + }); +/** + * Patch Variable + * Update a variable by key. + * @param data The data for the request. + * @param data.variableKey + * @param data.requestBody + * @param data.updateMask + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServicePatchVariable = < + TData = Common.VariableServicePatchVariableMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: VariableBody; + updateMask?: string[]; + variableKey: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: VariableBody; + updateMask?: string[]; + variableKey: string; + }, + TContext + >({ + mutationFn: ({ requestBody, updateMask, variableKey }) => + VariableService.patchVariable({ requestBody, updateMask, variableKey }) as unknown as Promise, + ...options, + }); +/** + * Bulk Variables + * Bulk create, update, and delete variables. + * @param data The data for the request. 
+ * @param data.requestBody + * @returns BulkResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceBulkVariables = < + TData = Common.VariableServiceBulkVariablesMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: BulkBody_VariableBody_; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: BulkBody_VariableBody_; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + VariableService.bulkVariables({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Delete Asset Queued Events + * Delete queued asset events for an asset. + * @param data The data for the request. + * @param data.assetId + * @param data.before + * @returns void Successful Response + * @throws ApiError + */ +export const useAssetServiceDeleteAssetQueuedEvents = < + TData = Common.AssetServiceDeleteAssetQueuedEventsMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + assetId: number; + before?: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + assetId: number; + before?: string; + }, + TContext + >({ + mutationFn: ({ assetId, before }) => + AssetService.deleteAssetQueuedEvents({ assetId, before }) as unknown as Promise, + ...options, + }); +/** + * Delete Dag Asset Queued Events + * @param data The data for the request. 
+ * @param data.dagId + * @param data.before + * @returns void Successful Response + * @throws ApiError + */ +export const useAssetServiceDeleteDagAssetQueuedEvents = < + TData = Common.AssetServiceDeleteDagAssetQueuedEventsMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + before?: string; + dagId: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + before?: string; + dagId: string; + }, + TContext + >({ + mutationFn: ({ before, dagId }) => + AssetService.deleteDagAssetQueuedEvents({ before, dagId }) as unknown as Promise, + ...options, + }); +/** + * Delete Dag Asset Queued Event + * Delete a queued asset event for a DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.assetId + * @param data.before + * @returns void Successful Response + * @throws ApiError + */ +export const useAssetServiceDeleteDagAssetQueuedEvent = < + TData = Common.AssetServiceDeleteDagAssetQueuedEventMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + assetId: number; + before?: string; + dagId: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + assetId: number; + before?: string; + dagId: string; + }, + TContext + >({ + mutationFn: ({ assetId, before, dagId }) => + AssetService.deleteDagAssetQueuedEvent({ assetId, before, dagId }) as unknown as Promise, + ...options, + }); +/** + * Delete Connection + * Delete a connection entry. + * @param data The data for the request. 
+ * @param data.connectionId + * @returns void Successful Response + * @throws ApiError + */ +export const useConnectionServiceDeleteConnection = < + TData = Common.ConnectionServiceDeleteConnectionMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + connectionId: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + connectionId: string; + }, + TContext + >({ + mutationFn: ({ connectionId }) => + ConnectionService.deleteConnection({ connectionId }) as unknown as Promise, + ...options, + }); +/** + * Delete Dag Run + * Delete a DAG Run entry. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns void Successful Response + * @throws ApiError + */ +export const useDagRunServiceDeleteDagRun = < + TData = Common.DagRunServiceDeleteDagRunMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId }) => + DagRunService.deleteDagRun({ dagId, dagRunId }) as unknown as Promise, + ...options, + }); +/** + * Delete Dag + * Delete the specific DAG. + * @param data The data for the request. 
+ * @param data.dagId + * @returns unknown Successful Response + * @throws ApiError + */ +export const useDagServiceDeleteDag = < + TData = Common.DagServiceDeleteDagMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + }, + TContext + >({ mutationFn: ({ dagId }) => DagService.deleteDag({ dagId }) as unknown as Promise, ...options }); +/** + * Delete Pool + * Delete a pool entry. + * @param data The data for the request. + * @param data.poolName + * @returns void Successful Response + * @throws ApiError + */ +export const usePoolServiceDeletePool = < + TData = Common.PoolServiceDeletePoolMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + poolName: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + poolName: string; + }, + TContext + >({ + mutationFn: ({ poolName }) => PoolService.deletePool({ poolName }) as unknown as Promise, + ...options, + }); /** -* Get Assets -* Get assets. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.namePattern -* @param data.uriPattern -* @param data.dagIds -* @param data.onlyActive -* @param data.orderBy -* @returns AssetCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssets = = unknown[]>({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }: { - dagIds?: string[]; - limit?: number; - namePattern?: string; - offset?: number; - onlyActive?: boolean; - orderBy?: string; - uriPattern?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAssetServiceGetAssetsKeyFn({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }, queryKey), queryFn: () => AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }) as TData, ...options }); -/** -* Get Asset Aliases -* Get asset aliases. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.namePattern -* @param data.orderBy -* @returns AssetAliasCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssetAliases = = unknown[]>({ limit, namePattern, offset, orderBy }: { - limit?: number; - namePattern?: string; - offset?: number; - orderBy?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }, queryKey), queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) as TData, ...options }); -/** -* Get Asset Alias -* Get an asset alias. -* @param data The data for the request. 
-* @param data.assetAliasId -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssetAlias = = unknown[]>({ assetAliasId }: { - assetAliasId: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }, queryKey), queryFn: () => AssetService.getAssetAlias({ assetAliasId }) as TData, ...options }); -/** -* Get Asset Events -* Get asset events. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.assetId -* @param data.sourceDagId -* @param data.sourceTaskId -* @param data.sourceRunId -* @param data.sourceMapIndex -* @param data.timestampGte -* @param data.timestampLte -* @returns AssetEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssetEvents = = unknown[]>({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }: { - assetId?: number; - limit?: number; - offset?: number; - orderBy?: string; - sourceDagId?: string; - sourceMapIndex?: number; - sourceRunId?: string; - sourceTaskId?: string; - timestampGte?: string; - timestampLte?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }, queryKey), queryFn: () => AssetService.getAssetEvents({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }) as TData, ...options }); -/** -* Get Asset Queued Events -* Get queued asset events for an asset. -* @param data The data for the request. 
-* @param data.assetId -* @param data.before -* @returns QueuedEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssetQueuedEvents = = unknown[]>({ assetId, before }: { - assetId: number; - before?: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }, queryKey), queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) as TData, ...options }); -/** -* Get Asset -* Get an asset. -* @param data The data for the request. -* @param data.assetId -* @returns AssetResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAsset = = unknown[]>({ assetId }: { - assetId: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }, queryKey), queryFn: () => AssetService.getAsset({ assetId }) as TData, ...options }); -/** -* Get Dag Asset Queued Events -* Get queued asset events for a DAG. -* @param data The data for the request. -* @param data.dagId -* @param data.before -* @returns QueuedEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetDagAssetQueuedEvents = = unknown[]>({ before, dagId }: { - before?: string; - dagId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }, queryKey), queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) as TData, ...options }); -/** -* Get Dag Asset Queued Event -* Get a queued asset event for a DAG. -* @param data The data for the request. 
-* @param data.dagId -* @param data.assetId -* @param data.before -* @returns QueuedEventResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetDagAssetQueuedEvent = = unknown[]>({ assetId, before, dagId }: { - assetId: number; - before?: string; - dagId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }, queryKey), queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) as TData, ...options }); -/** -* Next Run Assets -* @param data The data for the request. -* @param data.dagId -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useAssetServiceNextRunAssets = = unknown[]>({ dagId }: { - dagId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }, queryKey), queryFn: () => AssetService.nextRunAssets({ dagId }) as TData, ...options }); -/** -* List Backfills -* @param data The data for the request. -* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns BackfillCollectionResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServiceListBackfills = = unknown[]>({ dagId, limit, offset, orderBy }: { - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }, queryKey), queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, ...options }); -/** -* Get Backfill -* @param data The data for the request. 
-* @param data.backfillId -* @returns BackfillResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServiceGetBackfill = = unknown[]>({ backfillId }: { - backfillId: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }, queryKey), queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, ...options }); -/** -* List Backfills -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.dagId -* @param data.active -* @returns BackfillCollectionResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServiceListBackfills1 = = unknown[]>({ active, dagId, limit, offset, orderBy }: { - active?: boolean; - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseBackfillServiceListBackfills1KeyFn({ active, dagId, limit, offset, orderBy }, queryKey), queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }) as TData, ...options }); -/** -* Get Connection -* Get a connection entry. -* @param data The data for the request. -* @param data.connectionId -* @returns ConnectionResponse Successful Response -* @throws ApiError -*/ -export const useConnectionServiceGetConnection = = unknown[]>({ connectionId }: { - connectionId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }, queryKey), queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, ...options }); -/** -* Get Connections -* Get all connection entries. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.connectionIdPattern -* @returns ConnectionCollectionResponse Successful Response -* @throws ApiError -*/ -export const useConnectionServiceGetConnections = = unknown[]>({ connectionIdPattern, limit, offset, orderBy }: { - connectionIdPattern?: string; - limit?: number; - offset?: number; - orderBy?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }, queryKey), queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) as TData, ...options }); -/** -* Hook Meta Data -* Retrieve information about available connection types (hook classes) and their parameters. -* @returns ConnectionHookMetaData Successful Response -* @throws ApiError -*/ -export const useConnectionServiceHookMetaData = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(queryKey), queryFn: () => ConnectionService.hookMetaData() as TData, ...options }); -/** -* Get Dag Run -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @returns DAGRunResponse Successful Response -* @throws ApiError -*/ -export const useDagRunServiceGetDagRun = = unknown[]>({ dagId, dagRunId }: { - dagId: string; - dagRunId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }, queryKey), queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, ...options }); -/** -* Get Upstream Asset Events -* If dag run is asset-triggered, return the asset events that triggered it. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @returns AssetEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagRunServiceGetUpstreamAssetEvents = = unknown[]>({ dagId, dagRunId }: { - dagId: string; - dagRunId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }, queryKey), queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) as TData, ...options }); -/** -* Get Dag Runs -* Get all DAG Runs. -* -* This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. -* @param data The data for the request. -* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.runType -* @param data.state -* @param data.orderBy -* @returns DAGRunCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagRunServiceGetDagRuns = = unknown[]>({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { - dagId: string; - endDateGte?: string; - endDateLte?: string; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - updatedAtGte?: string; - updatedAtLte?: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, limit, logicalDateGte, 
logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }, queryKey), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) as TData, ...options }); -/** -* Get Dag Source -* Get source code using file token. -* @param data The data for the request. -* @param data.dagId -* @param data.versionNumber -* @param data.accept -* @returns DAGSourceResponse Successful Response -* @throws ApiError -*/ -export const useDagSourceServiceGetDagSource = = unknown[]>({ accept, dagId, versionNumber }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - versionNumber?: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }, queryKey), queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) as TData, ...options }); -/** -* Get Dag Stats -* Get Dag statistics. -* @param data The data for the request. -* @param data.dagIds -* @returns DagStatsCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagStatsServiceGetDagStats = = unknown[]>({ dagIds }: { - dagIds?: string[]; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }, queryKey), queryFn: () => DagStatsService.getDagStats({ dagIds }) as TData, ...options }); -/** -* Get Dag Reports -* Get DAG report. -* @param data The data for the request. 
-* @param data.subdir -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useDagReportServiceGetDagReports = = unknown[]>({ subdir }: { - subdir: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }, queryKey), queryFn: () => DagReportService.getDagReports({ subdir }) as TData, ...options }); -/** -* Get Config -* @param data The data for the request. -* @param data.section -* @param data.accept -* @returns Config Successful Response -* @throws ApiError -*/ -export const useConfigServiceGetConfig = = unknown[]>({ accept, section }: { - accept?: "application/json" | "text/plain" | "*/*"; - section?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }, queryKey), queryFn: () => ConfigService.getConfig({ accept, section }) as TData, ...options }); -/** -* Get Config Value -* @param data The data for the request. -* @param data.section -* @param data.option -* @param data.accept -* @returns Config Successful Response -* @throws ApiError -*/ -export const useConfigServiceGetConfigValue = = unknown[]>({ accept, option, section }: { - accept?: "application/json" | "text/plain" | "*/*"; - option: string; - section: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }, queryKey), queryFn: () => ConfigService.getConfigValue({ accept, option, section }) as TData, ...options }); -/** -* Get Configs -* Get configs for UI. 
-* @returns ConfigResponse Successful Response -* @throws ApiError -*/ -export const useConfigServiceGetConfigs = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseConfigServiceGetConfigsKeyFn(queryKey), queryFn: () => ConfigService.getConfigs() as TData, ...options }); -/** -* List Dag Warnings -* Get a list of DAG warnings. -* @param data The data for the request. -* @param data.dagId -* @param data.warningType -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns DAGWarningCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagWarningServiceListDagWarnings = = unknown[]>({ dagId, limit, offset, orderBy, warningType }: { - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - warningType?: DagWarningType; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }, queryKey), queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) as TData, ...options }); -/** -* Get Dags -* Get all DAGs. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.tags -* @param data.tagsMatchMode -* @param data.owners -* @param data.dagIdPattern -* @param data.dagDisplayNamePattern -* @param data.excludeStale -* @param data.paused -* @param data.lastDagRunState -* @param data.dagRunStartDateGte -* @param data.dagRunStartDateLte -* @param data.dagRunEndDateGte -* @param data.dagRunEndDateLte -* @param data.dagRunState -* @param data.orderBy -* @returns DAGCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagServiceGetDags = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagRunEndDateGte?: string; - dagRunEndDateLte?: string; - dagRunStartDateGte?: string; - dagRunStartDateLte?: string; - dagRunState?: string[]; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - orderBy?: string; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options }); -/** -* Get Dag -* Get basic information about a DAG. -* @param data The data for the request. 
-* @param data.dagId -* @returns DAGResponse Successful Response -* @throws ApiError -*/ -export const useDagServiceGetDag = = unknown[]>({ dagId }: { - dagId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }, queryKey), queryFn: () => DagService.getDag({ dagId }) as TData, ...options }); -/** -* Get Dag Details -* Get details of DAG. -* @param data The data for the request. -* @param data.dagId -* @returns DAGDetailsResponse Successful Response -* @throws ApiError -*/ -export const useDagServiceGetDagDetails = = unknown[]>({ dagId }: { - dagId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }, queryKey), queryFn: () => DagService.getDagDetails({ dagId }) as TData, ...options }); -/** -* Get Dag Tags -* Get all DAG tags. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.tagNamePattern -* @returns DAGTagCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagServiceGetDagTags = = unknown[]>({ limit, offset, orderBy, tagNamePattern }: { - limit?: number; - offset?: number; - orderBy?: string; - tagNamePattern?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }, queryKey), queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) as TData, ...options }); -/** -* Recent Dag Runs -* Get recent DAG runs. -* @param data The data for the request. 
-* @param data.dagRunsLimit -* @param data.limit -* @param data.offset -* @param data.tags -* @param data.tagsMatchMode -* @param data.owners -* @param data.dagIds -* @param data.dagIdPattern -* @param data.dagDisplayNamePattern -* @param data.excludeStale -* @param data.paused -* @param data.lastDagRunState -* @returns DAGWithLatestDagRunsCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagServiceRecentDagRuns = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagIds?: string[]; - dagRunsLimit?: number; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagServiceRecentDagRunsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.recentDagRuns({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }) as TData, ...options }); -/** -* Get Event Log -* @param data The data for the request. -* @param data.eventLogId -* @returns EventLogResponse Successful Response -* @throws ApiError -*/ -export const useEventLogServiceGetEventLog = = unknown[]>({ eventLogId }: { - eventLogId: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }, queryKey), queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, ...options }); -/** -* Get Event Logs -* Get all Event Logs. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.dagId -* @param data.taskId -* @param data.runId -* @param data.mapIndex -* @param data.tryNumber -* @param data.owner -* @param data.event -* @param data.excludedEvents -* @param data.includedEvents -* @param data.before -* @param data.after -* @returns EventLogCollectionResponse Successful Response -* @throws ApiError -*/ -export const useEventLogServiceGetEventLogs = = unknown[]>({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }: { - after?: string; - before?: string; - dagId?: string; - event?: string; - excludedEvents?: string[]; - includedEvents?: string[]; - limit?: number; - mapIndex?: number; - offset?: number; - orderBy?: string; - owner?: string; - runId?: string; - taskId?: string; - tryNumber?: number; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }, queryKey), queryFn: () => EventLogService.getEventLogs({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }) as TData, ...options }); -/** -* Get Extra Links -* Get extra links for task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns ExtraLinkCollectionResponse Successful Response -* @throws ApiError -*/ -export const useExtraLinksServiceGetExtraLinks = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Extra Links -* Get extra links for task instance. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns ExtraLinkCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetExtraLinks = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Task Instance -* Get task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @returns TaskInstanceResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstance = = unknown[]>({ dagId, dagRunId, taskId }: { - dagId: string; - dagRunId: string; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) as TData, ...options }); -/** -* Get Mapped Task Instances -* Get list of mapped task instances. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.durationGte -* @param data.durationLte -* @param data.state -* @param data.pool -* @param data.queue -* @param data.executor -* @param data.versionNumber -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetMappedTaskInstances = = unknown[]>({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: 
string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskId: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }) as TData, ...options }); -/** -* Get Task Instance Dependencies -* Get dependencies blocking task from getting scheduled. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskDependencyCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndex = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Task Instance Dependencies -* Get dependencies blocking task from getting scheduled. 
-* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskDependencyCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstanceDependencies = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Task Instance Tries -* Get list of task instances history. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceHistoryCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstanceTries = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Mapped Task Instance Tries -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceHistoryCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetMappedTaskInstanceTries = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Mapped Task Instance -* Get task instance. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetMappedTaskInstance = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Task Instances -* Get list of task instances. -* -* This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs -* and DAG runs. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.durationGte -* @param data.durationLte -* @param data.taskDisplayNamePattern -* @param data.state -* @param data.pool -* @param data.queue -* @param data.executor -* @param data.versionNumber -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstances = = unknown[]>({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskDisplayNamePattern?: string; - taskId?: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, 
updatedAtGte, updatedAtLte, versionNumber }, queryKey), queryFn: () => TaskInstanceService.getTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }) as TData, ...options }); -/** -* Get Task Instance Try Details -* Get task instance details by try number. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.taskTryNumber -* @param data.mapIndex -* @returns TaskInstanceHistoryResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstanceTryDetails = = unknown[]>({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - taskTryNumber: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) as TData, ...options }); -/** -* Get Mapped Task Instance Try Details -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.taskTryNumber -* @param data.mapIndex -* @returns TaskInstanceHistoryResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetMappedTaskInstanceTryDetails = = unknown[]>({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - taskTryNumber: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) as TData, ...options }); -/** -* Get Log -* Get logs for a specific task instance. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.tryNumber -* @param data.fullContent -* @param data.mapIndex -* @param data.token -* @param data.accept -* @returns TaskInstancesLogResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetLog = = unknown[]>({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }: { - accept?: "application/json" | "*/*" | "application/x-ndjson"; - dagId: string; - dagRunId: string; - fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string; - tryNumber: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }, queryKey), queryFn: () => TaskInstanceService.getLog({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }) as TData, ...options }); -/** -* Get Import Error -* Get an import error. -* @param data The data for the request. 
-* @param data.importErrorId -* @returns ImportErrorResponse Successful Response -* @throws ApiError -*/ -export const useImportErrorServiceGetImportError = = unknown[]>({ importErrorId }: { - importErrorId: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }, queryKey), queryFn: () => ImportErrorService.getImportError({ importErrorId }) as TData, ...options }); -/** -* Get Import Errors -* Get all import errors. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns ImportErrorCollectionResponse Successful Response -* @throws ApiError -*/ -export const useImportErrorServiceGetImportErrors = = unknown[]>({ limit, offset, orderBy }: { - limit?: number; - offset?: number; - orderBy?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }, queryKey), queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, ...options }); -/** -* Get Jobs -* Get all jobs. -* @param data The data for the request. 
-* @param data.isAlive -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.jobState -* @param data.jobType -* @param data.hostname -* @param data.executorClass -* @returns JobCollectionResponse Successful Response -* @throws ApiError -*/ -export const useJobServiceGetJobs = = unknown[]>({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }: { - endDateGte?: string; - endDateLte?: string; - executorClass?: string; - hostname?: string; - isAlive?: boolean; - jobState?: string; - jobType?: string; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string; - startDateLte?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseJobServiceGetJobsKeyFn({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }, queryKey), queryFn: () => JobService.getJobs({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }) as TData, ...options }); -/** -* Get Plugins -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @returns PluginCollectionResponse Successful Response -* @throws ApiError -*/ -export const usePluginServiceGetPlugins = = unknown[]>({ limit, offset }: { - limit?: number; - offset?: number; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }, queryKey), queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, ...options }); -/** -* Get Pool -* Get a pool. -* @param data The data for the request. 
-* @param data.poolName -* @returns PoolResponse Successful Response -* @throws ApiError -*/ -export const usePoolServiceGetPool = = unknown[]>({ poolName }: { - poolName: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), queryFn: () => PoolService.getPool({ poolName }) as TData, ...options }); -/** -* Get Pools -* Get all pools entries. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.poolNamePattern -* @returns PoolCollectionResponse Successful Response -* @throws ApiError -*/ -export const usePoolServiceGetPools = = unknown[]>({ limit, offset, orderBy, poolNamePattern }: { - limit?: number; - offset?: number; - orderBy?: string; - poolNamePattern?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }, queryKey), queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) as TData, ...options }); -/** -* Get Providers -* Get providers. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @returns ProviderCollectionResponse Successful Response -* @throws ApiError -*/ -export const useProviderServiceGetProviders = = unknown[]>({ limit, offset }: { - limit?: number; - offset?: number; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }, queryKey), queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, ...options }); -/** -* Get Xcom Entry -* Get an XCom entry. -* @param data The data for the request. 
-* @param data.dagId -* @param data.taskId -* @param data.dagRunId -* @param data.xcomKey -* @param data.mapIndex -* @param data.deserialize -* @param data.stringify -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useXcomServiceGetXcomEntry = = unknown[]>({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, queryKey), queryFn: () => XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }) as TData, ...options }); -/** -* Get Xcom Entries -* Get all XCom entries. -* -* This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.xcomKey -* @param data.mapIndex -* @param data.limit -* @param data.offset -* @returns XComCollectionResponse Successful Response -* @throws ApiError -*/ -export const useXcomServiceGetXcomEntries = = unknown[]>({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }: { - dagId: string; - dagRunId: string; - limit?: number; - mapIndex?: number; - offset?: number; - taskId: string; - xcomKey?: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, queryKey), queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) as TData, ...options }); -/** -* Get Tasks -* Get tasks for DAG. -* @param data The data for the request. 
-* @param data.dagId -* @param data.orderBy -* @returns TaskCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskServiceGetTasks = = unknown[]>({ dagId, orderBy }: { - dagId: string; - orderBy?: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, ...options }); -/** -* Get Task -* Get simplified representation of a task. -* @param data The data for the request. -* @param data.dagId -* @param data.taskId -* @returns TaskResponse Successful Response -* @throws ApiError -*/ -export const useTaskServiceGetTask = = unknown[]>({ dagId, taskId }: { - dagId: string; - taskId: unknown; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey), queryFn: () => TaskService.getTask({ dagId, taskId }) as TData, ...options }); -/** -* Get Variable -* Get a variable entry. -* @param data The data for the request. -* @param data.variableKey -* @returns VariableResponse Successful Response -* @throws ApiError -*/ -export const useVariableServiceGetVariable = = unknown[]>({ variableKey }: { - variableKey: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }, queryKey), queryFn: () => VariableService.getVariable({ variableKey }) as TData, ...options }); -/** -* Get Variables -* Get all Variables entries. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.variableKeyPattern -* @returns VariableCollectionResponse Successful Response -* @throws ApiError -*/ -export const useVariableServiceGetVariables = = unknown[]>({ limit, offset, orderBy, variableKeyPattern }: { - limit?: number; - offset?: number; - orderBy?: string; - variableKeyPattern?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }, queryKey), queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) as TData, ...options }); -/** -* Get Dag Version -* Get one Dag Version. -* @param data The data for the request. -* @param data.dagId -* @param data.versionNumber -* @returns DagVersionResponse Successful Response -* @throws ApiError -*/ -export const useDagVersionServiceGetDagVersion = = unknown[]>({ dagId, versionNumber }: { - dagId: string; - versionNumber: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) as TData, ...options }); -/** -* Get Dag Versions -* Get all DAG Versions. -* -* This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. -* @param data The data for the request. 
-* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.versionNumber -* @param data.bundleName -* @param data.bundleVersion -* @param data.orderBy -* @returns DAGVersionCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagVersionServiceGetDagVersions = = unknown[]>({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }: { - bundleName?: string; - bundleVersion?: string; - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - versionNumber?: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) as TData, ...options }); -/** -* Get Health -* @returns HealthInfoResponse Successful Response -* @throws ApiError -*/ -export const useMonitorServiceGetHealth = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), queryFn: () => MonitorService.getHealth() as TData, ...options }); -/** -* Get Version -* Get version information. -* @returns VersionInfo Successful Response -* @throws ApiError -*/ -export const useVersionServiceGetVersion = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), queryFn: () => VersionService.getVersion() as TData, ...options }); -/** -* Login -* Redirect to the login URL depending on the AuthManager configured. -* @param data The data for the request. 
-* @param data.next -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useLoginServiceLogin = = unknown[]>({ next }: { - next?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseLoginServiceLoginKeyFn({ next }, queryKey), queryFn: () => LoginService.login({ next }) as TData, ...options }); -/** -* Logout -* Logout the user. -* @param data The data for the request. -* @param data.next -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useLoginServiceLogout = = unknown[]>({ next }: { - next?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseLoginServiceLogoutKeyFn({ next }, queryKey), queryFn: () => LoginService.logout({ next }) as TData, ...options }); -/** -* Get Auth Menus -* @returns MenuItemCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAuthLinksServiceGetAuthMenus = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(queryKey), queryFn: () => AuthLinksService.getAuthMenus() as TData, ...options }); -/** -* Get Dependencies -* Dependencies graph. -* @param data The data for the request. -* @param data.nodeId -* @returns BaseGraphResponse Successful Response -* @throws ApiError -*/ -export const useDependenciesServiceGetDependencies = = unknown[]>({ nodeId }: { - nodeId?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }, queryKey), queryFn: () => DependenciesService.getDependencies({ nodeId }) as TData, ...options }); -/** -* Historical Metrics -* Return cluster activity historical metrics. -* @param data The data for the request. 
-* @param data.startDate -* @param data.endDate -* @returns HistoricalMetricDataResponse Successful Response -* @throws ApiError -*/ -export const useDashboardServiceHistoricalMetrics = = unknown[]>({ endDate, startDate }: { - endDate?: string; - startDate: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }, queryKey), queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) as TData, ...options }); -/** -* Dag Stats -* Return basic DAG stats with counts of DAGs in various states. -* @returns DashboardDagStatsResponse Successful Response -* @throws ApiError -*/ -export const useDashboardServiceDagStats = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseDashboardServiceDagStatsKeyFn(queryKey), queryFn: () => DashboardService.dagStats() as TData, ...options }); -/** -* Structure Data -* Get Structure Data. -* @param data The data for the request. 
-* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.externalDependencies -* @param data.versionNumber -* @returns StructureDataResponse Successful Response -* @throws ApiError -*/ -export const useStructureServiceStructureData = = unknown[]>({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }: { - dagId: string; - externalDependencies?: boolean; - includeDownstream?: boolean; - includeUpstream?: boolean; - root?: string; - versionNumber?: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }, queryKey), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) as TData, ...options }); -/** -* Grid Data -* Return grid data. -* @param data The data for the request. 
-* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.offset -* @param data.runType -* @param data.state -* @param data.limit -* @param data.orderBy -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @returns GridResponse Successful Response -* @throws ApiError -*/ -export const useGridServiceGridData = = unknown[]>({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useQuery({ queryKey: Common.UseGridServiceGridDataKeyFn({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }, queryKey), queryFn: () => GridService.gridData({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }) as TData, ...options }); -/** -* Create Asset Event -* Create asset events. -* @param data The data for the request. -* @param data.requestBody -* @returns AssetEventResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceCreateAssetEvent = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody }) => AssetService.createAssetEvent({ requestBody }) as unknown as Promise, ...options }); -/** -* Materialize Asset -* Materialize an asset by triggering a DAG run that produces it. -* @param data The data for the request. 
-* @param data.assetId -* @returns DAGRunResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceMaterializeAsset = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ assetId }) => AssetService.materializeAsset({ assetId }) as unknown as Promise, ...options }); -/** -* Create Backfill -* @param data The data for the request. -* @param data.requestBody -* @returns BackfillResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServiceCreateBackfill = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody }) => BackfillService.createBackfill({ requestBody }) as unknown as Promise, ...options }); -/** -* Create Backfill Dry Run -* @param data The data for the request. -* @param data.requestBody -* @returns DryRunBackfillCollectionResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServiceCreateBackfillDryRun = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody }) => BackfillService.createBackfillDryRun({ requestBody }) as unknown as Promise, ...options }); -/** -* Post Connection -* Create connection entry. -* @param data The data for the request. -* @param data.requestBody -* @returns ConnectionResponse Successful Response -* @throws ApiError -*/ -export const useConnectionServicePostConnection = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody }) => ConnectionService.postConnection({ requestBody }) as unknown as Promise, ...options }); -/** -* Test Connection -* Test an API connection. -* -* This method first creates an in-memory transient conn_id & exports that to an env var, -* as some hook classes tries to find out the `conn` from their __init__ method & errors out if not found. -* It also deletes the conn id env connection after the test. -* @param data The data for the request. 
-* @param data.requestBody -* @returns ConnectionTestResponse Successful Response -* @throws ApiError -*/ -export const useConnectionServiceTestConnection = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody }) => ConnectionService.testConnection({ requestBody }) as unknown as Promise, ...options }); -/** -* Create Default Connections -* Create default connections. -* @returns void Successful Response -* @throws ApiError -*/ -export const useConnectionServiceCreateDefaultConnections = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: () => ConnectionService.createDefaultConnections() as unknown as Promise, ...options }); -/** -* Clear Dag Run -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.requestBody -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useDagRunServiceClearDagRun = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, requestBody }) => DagRunService.clearDagRun({ dagId, dagRunId, requestBody }) as unknown as Promise, ...options }); -/** -* Trigger Dag Run -* Trigger a DAG. -* @param data The data for the request. -* @param data.dagId -* @param data.requestBody -* @returns DAGRunResponse Successful Response -* @throws ApiError -*/ -export const useDagRunServiceTriggerDagRun = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, requestBody }) => DagRunService.triggerDagRun({ dagId, requestBody }) as unknown as Promise, ...options }); -/** -* Get List Dag Runs Batch -* Get a list of DAG Runs. -* @param data The data for the request. 
-* @param data.dagId -* @param data.requestBody -* @returns DAGRunCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagRunServiceGetListDagRunsBatch = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, requestBody }) => DagRunService.getListDagRunsBatch({ dagId, requestBody }) as unknown as Promise, ...options }); -/** -* Get Task Instances Batch -* Get list of task instances. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.requestBody -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstancesBatch = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, requestBody }) => TaskInstanceService.getTaskInstancesBatch({ dagId, dagRunId, requestBody }) as unknown as Promise, ...options }); -/** -* Post Clear Task Instances -* Clear task instances. -* @param data The data for the request. -* @param data.dagId -* @param data.requestBody -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServicePostClearTaskInstances = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, requestBody }) => TaskInstanceService.postClearTaskInstances({ dagId, requestBody }) as unknown as Promise, ...options }); -/** -* Post Pool -* Create a Pool. -* @param data The data for the request. -* @param data.requestBody -* @returns PoolResponse Successful Response -* @throws ApiError -*/ -export const usePoolServicePostPool = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody }) => PoolService.postPool({ requestBody }) as unknown as Promise, ...options }); -/** -* Create Xcom Entry -* Create an XCom entry. -* @param data The data for the request. 
-* @param data.dagId -* @param data.taskId -* @param data.dagRunId -* @param data.requestBody -* @returns XComResponseNative Successful Response -* @throws ApiError -*/ -export const useXcomServiceCreateXcomEntry = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, requestBody, taskId }) => XcomService.createXcomEntry({ dagId, dagRunId, requestBody, taskId }) as unknown as Promise, ...options }); -/** -* Post Variable -* Create a variable. -* @param data The data for the request. -* @param data.requestBody -* @returns VariableResponse Successful Response -* @throws ApiError -*/ -export const useVariableServicePostVariable = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody }) => VariableService.postVariable({ requestBody }) as unknown as Promise, ...options }); -/** -* Pause Backfill -* @param data The data for the request. -* @param data.backfillId -* @returns BackfillResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServicePauseBackfill = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ backfillId }) => BackfillService.pauseBackfill({ backfillId }) as unknown as Promise, ...options }); -/** -* Unpause Backfill -* @param data The data for the request. -* @param data.backfillId -* @returns BackfillResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServiceUnpauseBackfill = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ backfillId }) => BackfillService.unpauseBackfill({ backfillId }) as unknown as Promise, ...options }); -/** -* Cancel Backfill -* @param data The data for the request. 
-* @param data.backfillId -* @returns BackfillResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServiceCancelBackfill = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ backfillId }) => BackfillService.cancelBackfill({ backfillId }) as unknown as Promise, ...options }); -/** -* Reparse Dag File -* Request re-parsing a DAG file. -* @param data The data for the request. -* @param data.fileToken -* @returns null Successful Response -* @throws ApiError -*/ -export const useDagParsingServiceReparseDagFile = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ fileToken }) => DagParsingService.reparseDagFile({ fileToken }) as unknown as Promise, ...options }); -/** -* Patch Connection -* Update a connection entry. -* @param data The data for the request. -* @param data.connectionId -* @param data.requestBody -* @param data.updateMask -* @returns ConnectionResponse Successful Response -* @throws ApiError -*/ -export const useConnectionServicePatchConnection = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ connectionId, requestBody, updateMask }) => ConnectionService.patchConnection({ connectionId, requestBody, updateMask }) as unknown as Promise, ...options }); -/** -* Bulk Connections -* Bulk create, update, and delete connections. -* @param data The data for the request. -* @param data.requestBody -* @returns BulkResponse Successful Response -* @throws ApiError -*/ -export const useConnectionServiceBulkConnections = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody }) => ConnectionService.bulkConnections({ requestBody }) as unknown as Promise, ...options }); -/** -* Patch Dag Run -* Modify a DAG Run. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.requestBody -* @param data.updateMask -* @returns DAGRunResponse Successful Response -* @throws ApiError -*/ -export const useDagRunServicePatchDagRun = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, requestBody, updateMask }) => DagRunService.patchDagRun({ dagId, dagRunId, requestBody, updateMask }) as unknown as Promise, ...options }); -/** -* Patch Dags -* Patch multiple DAGs. -* @param data The data for the request. -* @param data.requestBody -* @param data.updateMask -* @param data.limit -* @param data.offset -* @param data.tags -* @param data.tagsMatchMode -* @param data.owners -* @param data.dagIdPattern -* @param data.excludeStale -* @param data.paused -* @returns DAGCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagServicePatchDags = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagIdPattern, excludeStale, limit, offset, owners, paused, requestBody, tags, tagsMatchMode, updateMask }) => DagService.patchDags({ dagIdPattern, excludeStale, limit, offset, owners, paused, requestBody, tags, tagsMatchMode, updateMask }) as unknown as Promise, ...options }); -/** -* Patch Dag -* Patch the specific DAG. -* @param data The data for the request. -* @param data.dagId -* @param data.requestBody -* @param data.updateMask -* @returns DAGResponse Successful Response -* @throws ApiError -*/ -export const useDagServicePatchDag = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, requestBody, updateMask }) => DagService.patchDag({ dagId, requestBody, updateMask }) as unknown as Promise, ...options }); -/** -* Patch Task Instance -* Update a task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.requestBody -* @param data.mapIndex -* @param data.updateMask -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServicePatchTaskInstance = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => TaskInstanceService.patchTaskInstance({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) as unknown as Promise, ...options }); -/** -* Patch Task Instance -* Update a task instance. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @param data.requestBody -* @param data.updateMask -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServicePatchTaskInstanceByMapIndex = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => TaskInstanceService.patchTaskInstanceByMapIndex({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) as unknown as Promise, ...options }); -/** -* Patch Task Instance Dry Run -* Update a task instance dry_run mode. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @param data.requestBody -* @param data.updateMask -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServicePatchTaskInstanceDryRunByMapIndex = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => TaskInstanceService.patchTaskInstanceDryRunByMapIndex({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) as unknown as Promise, ...options }); -/** -* Patch Task Instance Dry Run -* Update a task instance dry_run mode. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.requestBody -* @param data.mapIndex -* @param data.updateMask -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServicePatchTaskInstanceDryRun = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) => TaskInstanceService.patchTaskInstanceDryRun({ dagId, dagRunId, mapIndex, requestBody, taskId, updateMask }) as unknown as Promise, ...options }); -/** -* Patch Pool -* Update a Pool. -* @param data The data for the request. -* @param data.poolName -* @param data.requestBody -* @param data.updateMask -* @returns PoolResponse Successful Response -* @throws ApiError -*/ -export const usePoolServicePatchPool = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ poolName, requestBody, updateMask }) => PoolService.patchPool({ poolName, requestBody, updateMask }) as unknown as Promise, ...options }); -/** -* Bulk Pools -* Bulk create, update, and delete pools. -* @param data The data for the request. 
-* @param data.requestBody -* @returns BulkResponse Successful Response -* @throws ApiError -*/ -export const usePoolServiceBulkPools = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody }) => PoolService.bulkPools({ requestBody }) as unknown as Promise, ...options }); -/** -* Update Xcom Entry -* Update an existing XCom entry. -* @param data The data for the request. -* @param data.dagId -* @param data.taskId -* @param data.dagRunId -* @param data.xcomKey -* @param data.requestBody -* @returns XComResponseNative Successful Response -* @throws ApiError -*/ -export const useXcomServiceUpdateXcomEntry = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId, requestBody, taskId, xcomKey }) => XcomService.updateXcomEntry({ dagId, dagRunId, requestBody, taskId, xcomKey }) as unknown as Promise, ...options }); -/** -* Patch Variable -* Update a variable by key. -* @param data The data for the request. -* @param data.variableKey -* @param data.requestBody -* @param data.updateMask -* @returns VariableResponse Successful Response -* @throws ApiError -*/ -export const useVariableServicePatchVariable = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody, updateMask, variableKey }) => VariableService.patchVariable({ requestBody, updateMask, variableKey }) as unknown as Promise, ...options }); -/** -* Bulk Variables -* Bulk create, update, and delete variables. -* @param data The data for the request. -* @param data.requestBody -* @returns BulkResponse Successful Response -* @throws ApiError -*/ -export const useVariableServiceBulkVariables = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ requestBody }) => VariableService.bulkVariables({ requestBody }) as unknown as Promise, ...options }); -/** -* Delete Asset Queued Events -* Delete queued asset events for an asset. -* @param data The data for the request. 
-* @param data.assetId -* @param data.before -* @returns void Successful Response -* @throws ApiError -*/ -export const useAssetServiceDeleteAssetQueuedEvents = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ assetId, before }) => AssetService.deleteAssetQueuedEvents({ assetId, before }) as unknown as Promise, ...options }); -/** -* Delete Dag Asset Queued Events -* @param data The data for the request. -* @param data.dagId -* @param data.before -* @returns void Successful Response -* @throws ApiError -*/ -export const useAssetServiceDeleteDagAssetQueuedEvents = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ before, dagId }) => AssetService.deleteDagAssetQueuedEvents({ before, dagId }) as unknown as Promise, ...options }); -/** -* Delete Dag Asset Queued Event -* Delete a queued asset event for a DAG. -* @param data The data for the request. -* @param data.dagId -* @param data.assetId -* @param data.before -* @returns void Successful Response -* @throws ApiError -*/ -export const useAssetServiceDeleteDagAssetQueuedEvent = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ assetId, before, dagId }) => AssetService.deleteDagAssetQueuedEvent({ assetId, before, dagId }) as unknown as Promise, ...options }); -/** -* Delete Connection -* Delete a connection entry. -* @param data The data for the request. -* @param data.connectionId -* @returns void Successful Response -* @throws ApiError -*/ -export const useConnectionServiceDeleteConnection = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ connectionId }) => ConnectionService.deleteConnection({ connectionId }) as unknown as Promise, ...options }); -/** -* Delete Dag Run -* Delete a DAG Run entry. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @returns void Successful Response -* @throws ApiError -*/ -export const useDagRunServiceDeleteDagRun = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId, dagRunId }) => DagRunService.deleteDagRun({ dagId, dagRunId }) as unknown as Promise, ...options }); -/** -* Delete Dag -* Delete the specific DAG. -* @param data The data for the request. -* @param data.dagId -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useDagServiceDeleteDag = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ dagId }) => DagService.deleteDag({ dagId }) as unknown as Promise, ...options }); -/** -* Delete Pool -* Delete a pool entry. -* @param data The data for the request. -* @param data.poolName -* @returns void Successful Response -* @throws ApiError -*/ -export const usePoolServiceDeletePool = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ poolName }) => PoolService.deletePool({ poolName }) as unknown as Promise, ...options }); -/** -* Delete Variable -* Delete a variable entry. -* @param data The data for the request. -* @param data.variableKey -* @returns void Successful Response -* @throws ApiError -*/ -export const useVariableServiceDeleteVariable = (options?: Omit, "mutationFn">) => useMutation({ mutationFn: ({ variableKey }) => VariableService.deleteVariable({ variableKey }) as unknown as Promise, ...options }); + * Delete Variable + * Delete a variable entry. + * @param data The data for the request. 
+ * @param data.variableKey + * @returns void Successful Response + * @throws ApiError + */ +export const useVariableServiceDeleteVariable = < + TData = Common.VariableServiceDeleteVariableMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + variableKey: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + variableKey: string; + }, + TContext + >({ + mutationFn: ({ variableKey }) => + VariableService.deleteVariable({ variableKey }) as unknown as Promise, + ...options, + }); diff --git a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts index 38573aa012b28..d525b0a662c39 100644 --- a/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow-core/src/airflow/ui/openapi-gen/queries/suspense.ts @@ -1,1234 +1,2997 @@ -// generated with @7nohe/openapi-react-query-codegen@1.6.2 - +// generated with @7nohe/openapi-react-query-codegen@1.6.2 import { UseQueryOptions, useSuspenseQuery } from "@tanstack/react-query"; -import { AssetService, AuthLinksService, BackfillService, ConfigService, ConnectionService, DagReportService, DagRunService, DagService, DagSourceService, DagStatsService, DagVersionService, DagWarningService, DashboardService, DependenciesService, EventLogService, ExtraLinksService, GridService, ImportErrorService, JobService, LoginService, MonitorService, PluginService, PoolService, ProviderService, StructureService, TaskInstanceService, TaskService, VariableService, VersionService, XcomService } from "../requests/services.gen"; + +import { + AssetService, + AuthLinksService, + BackfillService, + ConfigService, + ConnectionService, + DagReportService, + DagRunService, + DagService, + DagSourceService, + DagStatsService, + DagVersionService, + DagWarningService, + DashboardService, + DependenciesService, + EventLogService, + ExtraLinksService, + GridService, + 
ImportErrorService, + JobService, + LoginService, + MonitorService, + PluginService, + PoolService, + ProviderService, + StructureService, + TaskInstanceService, + TaskService, + VariableService, + VersionService, + XcomService, +} from "../requests/services.gen"; import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; + /** -* Get Assets -* Get assets. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.namePattern -* @param data.uriPattern -* @param data.dagIds -* @param data.onlyActive -* @param data.orderBy -* @returns AssetCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssetsSuspense = = unknown[]>({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }: { - dagIds?: string[]; - limit?: number; - namePattern?: string; - offset?: number; - onlyActive?: boolean; - orderBy?: string; - uriPattern?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseAssetServiceGetAssetsKeyFn({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }, queryKey), queryFn: () => AssetService.getAssets({ dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }) as TData, ...options }); -/** -* Get Asset Aliases -* Get asset aliases. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.namePattern -* @param data.orderBy -* @returns AssetAliasCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssetAliasesSuspense = = unknown[]>({ limit, namePattern, offset, orderBy }: { - limit?: number; - namePattern?: string; - offset?: number; - orderBy?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }, queryKey), queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) as TData, ...options }); -/** -* Get Asset Alias -* Get an asset alias. -* @param data The data for the request. -* @param data.assetAliasId -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssetAliasSuspense = = unknown[]>({ assetAliasId }: { - assetAliasId: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }, queryKey), queryFn: () => AssetService.getAssetAlias({ assetAliasId }) as TData, ...options }); -/** -* Get Asset Events -* Get asset events. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.assetId -* @param data.sourceDagId -* @param data.sourceTaskId -* @param data.sourceRunId -* @param data.sourceMapIndex -* @param data.timestampGte -* @param data.timestampLte -* @returns AssetEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssetEventsSuspense = = unknown[]>({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }: { - assetId?: number; - limit?: number; - offset?: number; - orderBy?: string; - sourceDagId?: string; - sourceMapIndex?: number; - sourceRunId?: string; - sourceTaskId?: string; - timestampGte?: string; - timestampLte?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseAssetServiceGetAssetEventsKeyFn({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }, queryKey), queryFn: () => AssetService.getAssetEvents({ assetId, limit, offset, orderBy, sourceDagId, sourceMapIndex, sourceRunId, sourceTaskId, timestampGte, timestampLte }) as TData, ...options }); -/** -* Get Asset Queued Events -* Get queued asset events for an asset. -* @param data The data for the request. -* @param data.assetId -* @param data.before -* @returns QueuedEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssetQueuedEventsSuspense = = unknown[]>({ assetId, before }: { - assetId: number; - before?: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }, queryKey), queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) as TData, ...options }); -/** -* Get Asset -* Get an asset. -* @param data The data for the request. 
-* @param data.assetId -* @returns AssetResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetAssetSuspense = = unknown[]>({ assetId }: { - assetId: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }, queryKey), queryFn: () => AssetService.getAsset({ assetId }) as TData, ...options }); -/** -* Get Dag Asset Queued Events -* Get queued asset events for a DAG. -* @param data The data for the request. -* @param data.dagId -* @param data.before -* @returns QueuedEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetDagAssetQueuedEventsSuspense = = unknown[]>({ before, dagId }: { - before?: string; - dagId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }, queryKey), queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) as TData, ...options }); -/** -* Get Dag Asset Queued Event -* Get a queued asset event for a DAG. -* @param data The data for the request. -* @param data.dagId -* @param data.assetId -* @param data.before -* @returns QueuedEventResponse Successful Response -* @throws ApiError -*/ -export const useAssetServiceGetDagAssetQueuedEventSuspense = = unknown[]>({ assetId, before, dagId }: { - assetId: number; - before?: string; - dagId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }, queryKey), queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) as TData, ...options }); -/** -* Next Run Assets -* @param data The data for the request. 
-* @param data.dagId -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useAssetServiceNextRunAssetsSuspense = = unknown[]>({ dagId }: { - dagId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }, queryKey), queryFn: () => AssetService.nextRunAssets({ dagId }) as TData, ...options }); -/** -* List Backfills -* @param data The data for the request. -* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns BackfillCollectionResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServiceListBackfillsSuspense = = unknown[]>({ dagId, limit, offset, orderBy }: { - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }, queryKey), queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, ...options }); -/** -* Get Backfill -* @param data The data for the request. -* @param data.backfillId -* @returns BackfillResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServiceGetBackfillSuspense = = unknown[]>({ backfillId }: { - backfillId: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }, queryKey), queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, ...options }); -/** -* List Backfills -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.dagId -* @param data.active -* @returns BackfillCollectionResponse Successful Response -* @throws ApiError -*/ -export const useBackfillServiceListBackfills1Suspense = = unknown[]>({ active, dagId, limit, offset, orderBy }: { - active?: boolean; - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseBackfillServiceListBackfills1KeyFn({ active, dagId, limit, offset, orderBy }, queryKey), queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }) as TData, ...options }); -/** -* Get Connection -* Get a connection entry. -* @param data The data for the request. -* @param data.connectionId -* @returns ConnectionResponse Successful Response -* @throws ApiError -*/ -export const useConnectionServiceGetConnectionSuspense = = unknown[]>({ connectionId }: { - connectionId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }, queryKey), queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, ...options }); -/** -* Get Connections -* Get all connection entries. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.connectionIdPattern -* @returns ConnectionCollectionResponse Successful Response -* @throws ApiError -*/ -export const useConnectionServiceGetConnectionsSuspense = = unknown[]>({ connectionIdPattern, limit, offset, orderBy }: { - connectionIdPattern?: string; - limit?: number; - offset?: number; - orderBy?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseConnectionServiceGetConnectionsKeyFn({ connectionIdPattern, limit, offset, orderBy }, queryKey), queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) as TData, ...options }); -/** -* Hook Meta Data -* Retrieve information about available connection types (hook classes) and their parameters. -* @returns ConnectionHookMetaData Successful Response -* @throws ApiError -*/ -export const useConnectionServiceHookMetaDataSuspense = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(queryKey), queryFn: () => ConnectionService.hookMetaData() as TData, ...options }); -/** -* Get Dag Run -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @returns DAGRunResponse Successful Response -* @throws ApiError -*/ -export const useDagRunServiceGetDagRunSuspense = = unknown[]>({ dagId, dagRunId }: { - dagId: string; - dagRunId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }, queryKey), queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, ...options }); -/** -* Get Upstream Asset Events -* If dag run is asset-triggered, return the asset events that triggered it. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @returns AssetEventCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagRunServiceGetUpstreamAssetEventsSuspense = = unknown[]>({ dagId, dagRunId }: { - dagId: string; - dagRunId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }, queryKey), queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) as TData, ...options }); -/** -* Get Dag Runs -* Get all DAG Runs. -* -* This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. -* @param data The data for the request. -* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.runType -* @param data.state -* @param data.orderBy -* @returns DAGRunCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagRunServiceGetDagRunsSuspense = = unknown[]>({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }: { - dagId: string; - endDateGte?: string; - endDateLte?: string; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - updatedAtGte?: string; - updatedAtLte?: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagRunServiceGetDagRunsKeyFn({ dagId, endDateGte, endDateLte, 
limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }, queryKey), queryFn: () => DagRunService.getDagRuns({ dagId, endDateGte, endDateLte, limit, logicalDateGte, logicalDateLte, offset, orderBy, runAfterGte, runAfterLte, runType, startDateGte, startDateLte, state, updatedAtGte, updatedAtLte }) as TData, ...options }); -/** -* Get Dag Source -* Get source code using file token. -* @param data The data for the request. -* @param data.dagId -* @param data.versionNumber -* @param data.accept -* @returns DAGSourceResponse Successful Response -* @throws ApiError -*/ -export const useDagSourceServiceGetDagSourceSuspense = = unknown[]>({ accept, dagId, versionNumber }: { - accept?: "application/json" | "text/plain" | "*/*"; - dagId: string; - versionNumber?: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }, queryKey), queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) as TData, ...options }); -/** -* Get Dag Stats -* Get Dag statistics. -* @param data The data for the request. -* @param data.dagIds -* @returns DagStatsCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagStatsServiceGetDagStatsSuspense = = unknown[]>({ dagIds }: { - dagIds?: string[]; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }, queryKey), queryFn: () => DagStatsService.getDagStats({ dagIds }) as TData, ...options }); -/** -* Get Dag Reports -* Get DAG report. -* @param data The data for the request. 
-* @param data.subdir -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useDagReportServiceGetDagReportsSuspense = = unknown[]>({ subdir }: { - subdir: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }, queryKey), queryFn: () => DagReportService.getDagReports({ subdir }) as TData, ...options }); -/** -* Get Config -* @param data The data for the request. -* @param data.section -* @param data.accept -* @returns Config Successful Response -* @throws ApiError -*/ -export const useConfigServiceGetConfigSuspense = = unknown[]>({ accept, section }: { - accept?: "application/json" | "text/plain" | "*/*"; - section?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }, queryKey), queryFn: () => ConfigService.getConfig({ accept, section }) as TData, ...options }); -/** -* Get Config Value -* @param data The data for the request. -* @param data.section -* @param data.option -* @param data.accept -* @returns Config Successful Response -* @throws ApiError -*/ -export const useConfigServiceGetConfigValueSuspense = = unknown[]>({ accept, option, section }: { - accept?: "application/json" | "text/plain" | "*/*"; - option: string; - section: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }, queryKey), queryFn: () => ConfigService.getConfigValue({ accept, option, section }) as TData, ...options }); -/** -* Get Configs -* Get configs for UI. 
-* @returns ConfigResponse Successful Response -* @throws ApiError -*/ -export const useConfigServiceGetConfigsSuspense = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseConfigServiceGetConfigsKeyFn(queryKey), queryFn: () => ConfigService.getConfigs() as TData, ...options }); -/** -* List Dag Warnings -* Get a list of DAG warnings. -* @param data The data for the request. -* @param data.dagId -* @param data.warningType -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns DAGWarningCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagWarningServiceListDagWarningsSuspense = = unknown[]>({ dagId, limit, offset, orderBy, warningType }: { - dagId?: string; - limit?: number; - offset?: number; - orderBy?: string; - warningType?: DagWarningType; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ dagId, limit, offset, orderBy, warningType }, queryKey), queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) as TData, ...options }); -/** -* Get Dags -* Get all DAGs. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.tags -* @param data.tagsMatchMode -* @param data.owners -* @param data.dagIdPattern -* @param data.dagDisplayNamePattern -* @param data.excludeStale -* @param data.paused -* @param data.lastDagRunState -* @param data.dagRunStartDateGte -* @param data.dagRunStartDateLte -* @param data.dagRunEndDateGte -* @param data.dagRunEndDateLte -* @param data.dagRunState -* @param data.orderBy -* @returns DAGCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagServiceGetDagsSuspense = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagRunEndDateGte?: string; - dagRunEndDateLte?: string; - dagRunStartDateGte?: string; - dagRunStartDateLte?: string; - dagRunState?: string[]; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - orderBy?: string; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagServiceGetDagsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.getDags({ dagDisplayNamePattern, dagIdPattern, dagRunEndDateGte, dagRunEndDateLte, dagRunStartDateGte, dagRunStartDateLte, dagRunState, excludeStale, lastDagRunState, limit, offset, orderBy, owners, paused, tags, tagsMatchMode }) as TData, ...options }); -/** -* Get Dag -* Get basic information about a DAG. -* @param data The data for the request. 
-* @param data.dagId -* @returns DAGResponse Successful Response -* @throws ApiError -*/ -export const useDagServiceGetDagSuspense = = unknown[]>({ dagId }: { - dagId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }, queryKey), queryFn: () => DagService.getDag({ dagId }) as TData, ...options }); -/** -* Get Dag Details -* Get details of DAG. -* @param data The data for the request. -* @param data.dagId -* @returns DAGDetailsResponse Successful Response -* @throws ApiError -*/ -export const useDagServiceGetDagDetailsSuspense = = unknown[]>({ dagId }: { - dagId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }, queryKey), queryFn: () => DagService.getDagDetails({ dagId }) as TData, ...options }); -/** -* Get Dag Tags -* Get all DAG tags. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.tagNamePattern -* @returns DAGTagCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagServiceGetDagTagsSuspense = = unknown[]>({ limit, offset, orderBy, tagNamePattern }: { - limit?: number; - offset?: number; - orderBy?: string; - tagNamePattern?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }, queryKey), queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) as TData, ...options }); -/** -* Recent Dag Runs -* Get recent DAG runs. -* @param data The data for the request. 
-* @param data.dagRunsLimit -* @param data.limit -* @param data.offset -* @param data.tags -* @param data.tagsMatchMode -* @param data.owners -* @param data.dagIds -* @param data.dagIdPattern -* @param data.dagDisplayNamePattern -* @param data.excludeStale -* @param data.paused -* @param data.lastDagRunState -* @returns DAGWithLatestDagRunsCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagServiceRecentDagRunsSuspense = = unknown[]>({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }: { - dagDisplayNamePattern?: string; - dagIdPattern?: string; - dagIds?: string[]; - dagRunsLimit?: number; - excludeStale?: boolean; - lastDagRunState?: DagRunState; - limit?: number; - offset?: number; - owners?: string[]; - paused?: boolean; - tags?: string[]; - tagsMatchMode?: "any" | "all"; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagServiceRecentDagRunsKeyFn({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }, queryKey), queryFn: () => DagService.recentDagRuns({ dagDisplayNamePattern, dagIdPattern, dagIds, dagRunsLimit, excludeStale, lastDagRunState, limit, offset, owners, paused, tags, tagsMatchMode }) as TData, ...options }); -/** -* Get Event Log -* @param data The data for the request. -* @param data.eventLogId -* @returns EventLogResponse Successful Response -* @throws ApiError -*/ -export const useEventLogServiceGetEventLogSuspense = = unknown[]>({ eventLogId }: { - eventLogId: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }, queryKey), queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, ...options }); -/** -* Get Event Logs -* Get all Event Logs. 
-* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.dagId -* @param data.taskId -* @param data.runId -* @param data.mapIndex -* @param data.tryNumber -* @param data.owner -* @param data.event -* @param data.excludedEvents -* @param data.includedEvents -* @param data.before -* @param data.after -* @returns EventLogCollectionResponse Successful Response -* @throws ApiError -*/ -export const useEventLogServiceGetEventLogsSuspense = = unknown[]>({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }: { - after?: string; - before?: string; - dagId?: string; - event?: string; - excludedEvents?: string[]; - includedEvents?: string[]; - limit?: number; - mapIndex?: number; - offset?: number; - orderBy?: string; - owner?: string; - runId?: string; - taskId?: string; - tryNumber?: number; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseEventLogServiceGetEventLogsKeyFn({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }, queryKey), queryFn: () => EventLogService.getEventLogs({ after, before, dagId, event, excludedEvents, includedEvents, limit, mapIndex, offset, orderBy, owner, runId, taskId, tryNumber }) as TData, ...options }); -/** -* Get Extra Links -* Get extra links for task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns ExtraLinkCollectionResponse Successful Response -* @throws ApiError -*/ -export const useExtraLinksServiceGetExtraLinksSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Extra Links -* Get extra links for task instance. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns ExtraLinkCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetExtraLinksSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Task Instance -* Get task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @returns TaskInstanceResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstanceSuspense = = unknown[]>({ dagId, dagRunId, taskId }: { - dagId: string; - dagRunId: string; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) as TData, ...options }); -/** -* Get Mapped Task Instances -* Get list of mapped task instances. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.durationGte -* @param data.durationLte -* @param data.state -* @param data.pool -* @param data.queue -* @param data.executor -* @param data.versionNumber -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetMappedTaskInstancesSuspense = = unknown[]>({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }: { - dagId: string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: 
string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskId: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskId, updatedAtGte, updatedAtLte, versionNumber }) as TData, ...options }); -/** -* Get Task Instance Dependencies -* Get dependencies blocking task from getting scheduled. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskDependencyCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Task Instance Dependencies -* Get dependencies blocking task from getting scheduled. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskDependencyCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstanceDependenciesSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Task Instance Tries -* Get list of task instances history. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceHistoryCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstanceTriesSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Mapped Task Instance Tries -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceHistoryCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetMappedTaskInstanceTriesSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Mapped Task Instance -* Get task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.mapIndex -* @returns TaskInstanceResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetMappedTaskInstanceSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) as TData, ...options }); -/** -* Get Task Instances -* Get list of task instances. -* -* This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs -* and DAG runs. -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.updatedAtGte -* @param data.updatedAtLte -* @param data.durationGte -* @param data.durationLte -* @param data.taskDisplayNamePattern -* @param data.state -* @param data.pool -* @param data.queue -* @param data.executor -* @param data.versionNumber -* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns TaskInstanceCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstancesSuspense = = unknown[]>({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }: { - dagId: 
string; - dagRunId: string; - durationGte?: number; - durationLte?: number; - endDateGte?: string; - endDateLte?: string; - executor?: string[]; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - pool?: string[]; - queue?: string[]; - runAfterGte?: string; - runAfterLte?: string; - startDateGte?: string; - startDateLte?: string; - state?: string[]; - taskDisplayNamePattern?: string; - taskId?: string; - updatedAtGte?: string; - updatedAtLte?: string; - versionNumber?: number[]; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }, queryKey), queryFn: () => TaskInstanceService.getTaskInstances({ dagId, dagRunId, durationGte, durationLte, endDateGte, endDateLte, executor, limit, logicalDateGte, logicalDateLte, offset, orderBy, pool, queue, runAfterGte, runAfterLte, startDateGte, startDateLte, state, taskDisplayNamePattern, taskId, updatedAtGte, updatedAtLte, versionNumber }) as TData, ...options }); -/** -* Get Task Instance Try Details -* Get task instance details by try number. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.taskTryNumber -* @param data.mapIndex -* @returns TaskInstanceHistoryResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetTaskInstanceTryDetailsSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { - dagId: string; - dagRunId: string; - mapIndex?: number; - taskId: string; - taskTryNumber: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }, queryKey), queryFn: () => TaskInstanceService.getTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) as TData, ...options }); -/** -* Get Mapped Task Instance Try Details -* @param data The data for the request. -* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.taskTryNumber -* @param data.mapIndex -* @returns TaskInstanceHistoryResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetMappedTaskInstanceTryDetailsSuspense = = unknown[]>({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }: { - dagId: string; - dagRunId: string; - mapIndex: number; - taskId: string; - taskTryNumber: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }, queryKey), queryFn: () => TaskInstanceService.getMappedTaskInstanceTryDetails({ dagId, dagRunId, mapIndex, taskId, taskTryNumber }) as TData, ...options }); -/** -* Get Log -* Get logs for a specific task instance. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.tryNumber -* @param data.fullContent -* @param data.mapIndex -* @param data.token -* @param data.accept -* @returns TaskInstancesLogResponse Successful Response -* @throws ApiError -*/ -export const useTaskInstanceServiceGetLogSuspense = = unknown[]>({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }: { - accept?: "application/json" | "*/*" | "application/x-ndjson"; - dagId: string; - dagRunId: string; - fullContent?: boolean; - mapIndex?: number; - taskId: string; - token?: string; - tryNumber: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskInstanceServiceGetLogKeyFn({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }, queryKey), queryFn: () => TaskInstanceService.getLog({ accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }) as TData, ...options }); -/** -* Get Import Error -* Get an import error. -* @param data The data for the request. -* @param data.importErrorId -* @returns ImportErrorResponse Successful Response -* @throws ApiError -*/ -export const useImportErrorServiceGetImportErrorSuspense = = unknown[]>({ importErrorId }: { - importErrorId: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }, queryKey), queryFn: () => ImportErrorService.getImportError({ importErrorId }) as TData, ...options }); -/** -* Get Import Errors -* Get all import errors. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @param data.orderBy -* @returns ImportErrorCollectionResponse Successful Response -* @throws ApiError -*/ -export const useImportErrorServiceGetImportErrorsSuspense = = unknown[]>({ limit, offset, orderBy }: { - limit?: number; - offset?: number; - orderBy?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }, queryKey), queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, ...options }); -/** -* Get Jobs -* Get all jobs. -* @param data The data for the request. -* @param data.isAlive -* @param data.startDateGte -* @param data.startDateLte -* @param data.endDateGte -* @param data.endDateLte -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.jobState -* @param data.jobType -* @param data.hostname -* @param data.executorClass -* @returns JobCollectionResponse Successful Response -* @throws ApiError -*/ -export const useJobServiceGetJobsSuspense = = unknown[]>({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }: { - endDateGte?: string; - endDateLte?: string; - executorClass?: string; - hostname?: string; - isAlive?: boolean; - jobState?: string; - jobType?: string; - limit?: number; - offset?: number; - orderBy?: string; - startDateGte?: string; - startDateLte?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseJobServiceGetJobsKeyFn({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }, queryKey), queryFn: () => JobService.getJobs({ endDateGte, endDateLte, executorClass, hostname, isAlive, jobState, jobType, limit, offset, orderBy, startDateGte, startDateLte }) as TData, ...options }); -/** -* Get 
Plugins -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @returns PluginCollectionResponse Successful Response -* @throws ApiError -*/ -export const usePluginServiceGetPluginsSuspense = = unknown[]>({ limit, offset }: { - limit?: number; - offset?: number; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }, queryKey), queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, ...options }); -/** -* Get Pool -* Get a pool. -* @param data The data for the request. -* @param data.poolName -* @returns PoolResponse Successful Response -* @throws ApiError -*/ -export const usePoolServiceGetPoolSuspense = = unknown[]>({ poolName }: { - poolName: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), queryFn: () => PoolService.getPool({ poolName }) as TData, ...options }); -/** -* Get Pools -* Get all pools entries. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.poolNamePattern -* @returns PoolCollectionResponse Successful Response -* @throws ApiError -*/ -export const usePoolServiceGetPoolsSuspense = = unknown[]>({ limit, offset, orderBy, poolNamePattern }: { - limit?: number; - offset?: number; - orderBy?: string; - poolNamePattern?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }, queryKey), queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) as TData, ...options }); -/** -* Get Providers -* Get providers. -* @param data The data for the request. 
-* @param data.limit -* @param data.offset -* @returns ProviderCollectionResponse Successful Response -* @throws ApiError -*/ -export const useProviderServiceGetProvidersSuspense = = unknown[]>({ limit, offset }: { - limit?: number; - offset?: number; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }, queryKey), queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, ...options }); -/** -* Get Xcom Entry -* Get an XCom entry. -* @param data The data for the request. -* @param data.dagId -* @param data.taskId -* @param data.dagRunId -* @param data.xcomKey -* @param data.mapIndex -* @param data.deserialize -* @param data.stringify -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useXcomServiceGetXcomEntrySuspense = = unknown[]>({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }: { - dagId: string; - dagRunId: string; - deserialize?: boolean; - mapIndex?: number; - stringify?: boolean; - taskId: string; - xcomKey: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseXcomServiceGetXcomEntryKeyFn({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, queryKey), queryFn: () => XcomService.getXcomEntry({ dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }) as TData, ...options }); -/** -* Get Xcom Entries -* Get all XCom entries. -* -* This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. -* @param data The data for the request. 
-* @param data.dagId -* @param data.dagRunId -* @param data.taskId -* @param data.xcomKey -* @param data.mapIndex -* @param data.limit -* @param data.offset -* @returns XComCollectionResponse Successful Response -* @throws ApiError -*/ -export const useXcomServiceGetXcomEntriesSuspense = = unknown[]>({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }: { - dagId: string; - dagRunId: string; - limit?: number; - mapIndex?: number; - offset?: number; - taskId: string; - xcomKey?: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, queryKey), queryFn: () => XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) as TData, ...options }); -/** -* Get Tasks -* Get tasks for DAG. -* @param data The data for the request. -* @param data.dagId -* @param data.orderBy -* @returns TaskCollectionResponse Successful Response -* @throws ApiError -*/ -export const useTaskServiceGetTasksSuspense = = unknown[]>({ dagId, orderBy }: { - dagId: string; - orderBy?: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, ...options }); -/** -* Get Task -* Get simplified representation of a task. -* @param data The data for the request. 
-* @param data.dagId -* @param data.taskId -* @returns TaskResponse Successful Response -* @throws ApiError -*/ -export const useTaskServiceGetTaskSuspense = = unknown[]>({ dagId, taskId }: { - dagId: string; - taskId: unknown; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey), queryFn: () => TaskService.getTask({ dagId, taskId }) as TData, ...options }); -/** -* Get Variable -* Get a variable entry. -* @param data The data for the request. -* @param data.variableKey -* @returns VariableResponse Successful Response -* @throws ApiError -*/ -export const useVariableServiceGetVariableSuspense = = unknown[]>({ variableKey }: { - variableKey: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }, queryKey), queryFn: () => VariableService.getVariable({ variableKey }) as TData, ...options }); -/** -* Get Variables -* Get all Variables entries. -* @param data The data for the request. -* @param data.limit -* @param data.offset -* @param data.orderBy -* @param data.variableKeyPattern -* @returns VariableCollectionResponse Successful Response -* @throws ApiError -*/ -export const useVariableServiceGetVariablesSuspense = = unknown[]>({ limit, offset, orderBy, variableKeyPattern }: { - limit?: number; - offset?: number; - orderBy?: string; - variableKeyPattern?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseVariableServiceGetVariablesKeyFn({ limit, offset, orderBy, variableKeyPattern }, queryKey), queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) as TData, ...options }); -/** -* Get Dag Version -* Get one Dag Version. -* @param data The data for the request. 
-* @param data.dagId -* @param data.versionNumber -* @returns DagVersionResponse Successful Response -* @throws ApiError -*/ -export const useDagVersionServiceGetDagVersionSuspense = = unknown[]>({ dagId, versionNumber }: { - dagId: string; - versionNumber: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) as TData, ...options }); -/** -* Get Dag Versions -* Get all DAG Versions. -* -* This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. -* @param data The data for the request. -* @param data.dagId -* @param data.limit -* @param data.offset -* @param data.versionNumber -* @param data.bundleName -* @param data.bundleVersion -* @param data.orderBy -* @returns DAGVersionCollectionResponse Successful Response -* @throws ApiError -*/ -export const useDagVersionServiceGetDagVersionsSuspense = = unknown[]>({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }: { - bundleName?: string; - bundleVersion?: string; - dagId: string; - limit?: number; - offset?: number; - orderBy?: string; - versionNumber?: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, queryKey), queryFn: () => DagVersionService.getDagVersions({ bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }) as TData, ...options }); -/** -* Get Health -* @returns HealthInfoResponse Successful Response -* @throws ApiError -*/ -export const useMonitorServiceGetHealthSuspense = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), queryFn: () => 
MonitorService.getHealth() as TData, ...options }); -/** -* Get Version -* Get version information. -* @returns VersionInfo Successful Response -* @throws ApiError -*/ -export const useVersionServiceGetVersionSuspense = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), queryFn: () => VersionService.getVersion() as TData, ...options }); -/** -* Login -* Redirect to the login URL depending on the AuthManager configured. -* @param data The data for the request. -* @param data.next -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useLoginServiceLoginSuspense = = unknown[]>({ next }: { - next?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseLoginServiceLoginKeyFn({ next }, queryKey), queryFn: () => LoginService.login({ next }) as TData, ...options }); -/** -* Logout -* Logout the user. -* @param data The data for the request. -* @param data.next -* @returns unknown Successful Response -* @throws ApiError -*/ -export const useLoginServiceLogoutSuspense = = unknown[]>({ next }: { - next?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseLoginServiceLogoutKeyFn({ next }, queryKey), queryFn: () => LoginService.logout({ next }) as TData, ...options }); -/** -* Get Auth Menus -* @returns MenuItemCollectionResponse Successful Response -* @throws ApiError -*/ -export const useAuthLinksServiceGetAuthMenusSuspense = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(queryKey), queryFn: () => AuthLinksService.getAuthMenus() as TData, ...options }); -/** -* Get Dependencies -* Dependencies graph. -* @param data The data for the request. 
-* @param data.nodeId -* @returns BaseGraphResponse Successful Response -* @throws ApiError -*/ -export const useDependenciesServiceGetDependenciesSuspense = = unknown[]>({ nodeId }: { - nodeId?: string; -} = {}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }, queryKey), queryFn: () => DependenciesService.getDependencies({ nodeId }) as TData, ...options }); -/** -* Historical Metrics -* Return cluster activity historical metrics. -* @param data The data for the request. -* @param data.startDate -* @param data.endDate -* @returns HistoricalMetricDataResponse Successful Response -* @throws ApiError -*/ -export const useDashboardServiceHistoricalMetricsSuspense = = unknown[]>({ endDate, startDate }: { - endDate?: string; - startDate: string; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }, queryKey), queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) as TData, ...options }); -/** -* Dag Stats -* Return basic DAG stats with counts of DAGs in various states. -* @returns DashboardDagStatsResponse Successful Response -* @throws ApiError -*/ -export const useDashboardServiceDagStatsSuspense = = unknown[]>(queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseDashboardServiceDagStatsKeyFn(queryKey), queryFn: () => DashboardService.dagStats() as TData, ...options }); -/** -* Structure Data -* Get Structure Data. -* @param data The data for the request. 
-* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.externalDependencies -* @param data.versionNumber -* @returns StructureDataResponse Successful Response -* @throws ApiError -*/ -export const useStructureServiceStructureDataSuspense = = unknown[]>({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }: { - dagId: string; - externalDependencies?: boolean; - includeDownstream?: boolean; - includeUpstream?: boolean; - root?: string; - versionNumber?: number; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseStructureServiceStructureDataKeyFn({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }, queryKey), queryFn: () => StructureService.structureData({ dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }) as TData, ...options }); -/** -* Grid Data -* Return grid data. -* @param data The data for the request. 
-* @param data.dagId -* @param data.includeUpstream -* @param data.includeDownstream -* @param data.root -* @param data.offset -* @param data.runType -* @param data.state -* @param data.limit -* @param data.orderBy -* @param data.runAfterGte -* @param data.runAfterLte -* @param data.logicalDateGte -* @param data.logicalDateLte -* @returns GridResponse Successful Response -* @throws ApiError -*/ -export const useGridServiceGridDataSuspense = = unknown[]>({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }: { - dagId: string; - includeDownstream?: boolean; - includeUpstream?: boolean; - limit?: number; - logicalDateGte?: string; - logicalDateLte?: string; - offset?: number; - orderBy?: string; - root?: string; - runAfterGte?: string; - runAfterLte?: string; - runType?: string[]; - state?: string[]; -}, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">) => useSuspenseQuery({ queryKey: Common.UseGridServiceGridDataKeyFn({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }, queryKey), queryFn: () => GridService.gridData({ dagId, includeDownstream, includeUpstream, limit, logicalDateGte, logicalDateLte, offset, orderBy, root, runAfterGte, runAfterLte, runType, state }) as TData, ...options }); + * Get Assets + * Get assets. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.namePattern + * @param data.uriPattern + * @param data.dagIds + * @param data.onlyActive + * @param data.orderBy + * @returns AssetCollectionResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssetsSuspense = < + TData = Common.AssetServiceGetAssetsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagIds, + limit, + namePattern, + offset, + onlyActive, + orderBy, + uriPattern, + }: { + dagIds?: string[]; + limit?: number; + namePattern?: string; + offset?: number; + onlyActive?: boolean; + orderBy?: string; + uriPattern?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseAssetServiceGetAssetsKeyFn( + { dagIds, limit, namePattern, offset, onlyActive, orderBy, uriPattern }, + queryKey, + ), + queryFn: () => + AssetService.getAssets({ + dagIds, + limit, + namePattern, + offset, + onlyActive, + orderBy, + uriPattern, + }) as TData, + ...options, + }); +/** + * Get Asset Aliases + * Get asset aliases. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.namePattern + * @param data.orderBy + * @returns AssetAliasCollectionResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssetAliasesSuspense = < + TData = Common.AssetServiceGetAssetAliasesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + namePattern, + offset, + orderBy, + }: { + limit?: number; + namePattern?: string; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseAssetServiceGetAssetAliasesKeyFn({ limit, namePattern, offset, orderBy }, queryKey), + queryFn: () => AssetService.getAssetAliases({ limit, namePattern, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Asset Alias + * Get an asset alias. + * @param data The data for the request. + * @param data.assetAliasId + * @returns unknown Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssetAliasSuspense = < + TData = Common.AssetServiceGetAssetAliasDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + assetAliasId, + }: { + assetAliasId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseAssetServiceGetAssetAliasKeyFn({ assetAliasId }, queryKey), + queryFn: () => AssetService.getAssetAlias({ assetAliasId }) as TData, + ...options, + }); +/** + * Get Asset Events + * Get asset events. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.assetId + * @param data.sourceDagId + * @param data.sourceTaskId + * @param data.sourceRunId + * @param data.sourceMapIndex + * @param data.timestampGte + * @param data.timestampLte + * @returns AssetEventCollectionResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssetEventsSuspense = < + TData = Common.AssetServiceGetAssetEventsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }: { + assetId?: number; + limit?: number; + offset?: number; + orderBy?: string; + sourceDagId?: string; + sourceMapIndex?: number; + sourceRunId?: string; + sourceTaskId?: string; + timestampGte?: string; + timestampLte?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseAssetServiceGetAssetEventsKeyFn( + { + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }, + queryKey, + ), + queryFn: () => + AssetService.getAssetEvents({ + assetId, + limit, + offset, + orderBy, + sourceDagId, + sourceMapIndex, + sourceRunId, + sourceTaskId, + timestampGte, + timestampLte, + }) as TData, + ...options, + }); +/** + * Get Asset Queued Events + * Get queued asset events for an asset. + * @param data The data for the request. 
+ * @param data.assetId + * @param data.before + * @returns QueuedEventCollectionResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssetQueuedEventsSuspense = < + TData = Common.AssetServiceGetAssetQueuedEventsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + assetId, + before, + }: { + assetId: number; + before?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseAssetServiceGetAssetQueuedEventsKeyFn({ assetId, before }, queryKey), + queryFn: () => AssetService.getAssetQueuedEvents({ assetId, before }) as TData, + ...options, + }); +/** + * Get Asset + * Get an asset. + * @param data The data for the request. + * @param data.assetId + * @returns AssetResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetAssetSuspense = < + TData = Common.AssetServiceGetAssetDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + assetId, + }: { + assetId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseAssetServiceGetAssetKeyFn({ assetId }, queryKey), + queryFn: () => AssetService.getAsset({ assetId }) as TData, + ...options, + }); +/** + * Get Dag Asset Queued Events + * Get queued asset events for a DAG. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.before + * @returns QueuedEventCollectionResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetDagAssetQueuedEventsSuspense = < + TData = Common.AssetServiceGetDagAssetQueuedEventsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + before, + dagId, + }: { + before?: string; + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseAssetServiceGetDagAssetQueuedEventsKeyFn({ before, dagId }, queryKey), + queryFn: () => AssetService.getDagAssetQueuedEvents({ before, dagId }) as TData, + ...options, + }); +/** + * Get Dag Asset Queued Event + * Get a queued asset event for a DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.assetId + * @param data.before + * @returns QueuedEventResponse Successful Response + * @throws ApiError + */ +export const useAssetServiceGetDagAssetQueuedEventSuspense = < + TData = Common.AssetServiceGetDagAssetQueuedEventDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + assetId, + before, + dagId, + }: { + assetId: number; + before?: string; + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseAssetServiceGetDagAssetQueuedEventKeyFn({ assetId, before, dagId }, queryKey), + queryFn: () => AssetService.getDagAssetQueuedEvent({ assetId, before, dagId }) as TData, + ...options, + }); +/** + * Next Run Assets + * @param data The data for the request. 
+ * @param data.dagId + * @returns unknown Successful Response + * @throws ApiError + */ +export const useAssetServiceNextRunAssetsSuspense = < + TData = Common.AssetServiceNextRunAssetsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + }: { + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseAssetServiceNextRunAssetsKeyFn({ dagId }, queryKey), + queryFn: () => AssetService.nextRunAssets({ dagId }) as TData, + ...options, + }); +/** + * List Backfills + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns BackfillCollectionResponse Successful Response + * @throws ApiError + */ +export const useBackfillServiceListBackfillsSuspense = < + TData = Common.BackfillServiceListBackfillsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + limit, + offset, + orderBy, + }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ dagId, limit, offset, orderBy }, queryKey), + queryFn: () => BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Backfill + * @param data The data for the request. 
+ * @param data.backfillId + * @returns BackfillResponse Successful Response + * @throws ApiError + */ +export const useBackfillServiceGetBackfillSuspense = < + TData = Common.BackfillServiceGetBackfillDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + backfillId, + }: { + backfillId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }, queryKey), + queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, + ...options, + }); +/** + * List Backfills + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.active + * @returns BackfillCollectionResponse Successful Response + * @throws ApiError + */ +export const useBackfillServiceListBackfills1Suspense = < + TData = Common.BackfillServiceListBackfills1DefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + active, + dagId, + limit, + offset, + orderBy, + }: { + active?: boolean; + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseBackfillServiceListBackfills1KeyFn( + { active, dagId, limit, offset, orderBy }, + queryKey, + ), + queryFn: () => BackfillService.listBackfills1({ active, dagId, limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Connection + * Get a connection entry. + * @param data The data for the request. 
+ * @param data.connectionId + * @returns ConnectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServiceGetConnectionSuspense = < + TData = Common.ConnectionServiceGetConnectionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + connectionId, + }: { + connectionId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseConnectionServiceGetConnectionKeyFn({ connectionId }, queryKey), + queryFn: () => ConnectionService.getConnection({ connectionId }) as TData, + ...options, + }); +/** + * Get Connections + * Get all connection entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.connectionIdPattern + * @returns ConnectionCollectionResponse Successful Response + * @throws ApiError + */ +export const useConnectionServiceGetConnectionsSuspense = < + TData = Common.ConnectionServiceGetConnectionsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + connectionIdPattern, + limit, + offset, + orderBy, + }: { + connectionIdPattern?: string; + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseConnectionServiceGetConnectionsKeyFn( + { connectionIdPattern, limit, offset, orderBy }, + queryKey, + ), + queryFn: () => ConnectionService.getConnections({ connectionIdPattern, limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Hook Meta Data + * Retrieve information about available connection types (hook classes) and their parameters. 
+ * @returns ConnectionHookMetaData Successful Response + * @throws ApiError + */ +export const useConnectionServiceHookMetaDataSuspense = < + TData = Common.ConnectionServiceHookMetaDataDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseConnectionServiceHookMetaDataKeyFn(queryKey), + queryFn: () => ConnectionService.hookMetaData() as TData, + ...options, + }); +/** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetDagRunSuspense = < + TData = Common.DagRunServiceGetDagRunDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }, queryKey), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + ...options, + }); +/** + * Get Upstream Asset Events + * If dag run is asset-triggered, return the asset events that triggered it. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @returns AssetEventCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetUpstreamAssetEventsSuspense = < + TData = Common.DagRunServiceGetUpstreamAssetEventsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagRunServiceGetUpstreamAssetEventsKeyFn({ dagId, dagRunId }, queryKey), + queryFn: () => DagRunService.getUpstreamAssetEvents({ dagId, dagRunId }) as TData, + ...options, + }); +/** + * Get Dag Runs + * Get all DAG Runs. + * + * This endpoint allows specifying `~` as the dag_id to retrieve Dag Runs for all DAGs. + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.runType + * @param data.state + * @param data.orderBy + * @returns DAGRunCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetDagRunsSuspense = < + TData = Common.DagRunServiceGetDagRunsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }: { + dagId: string; + endDateGte?: string; + endDateLte?: string; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + runAfterGte?: string; + 
runAfterLte?: string; + runType?: string[]; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + updatedAtGte?: string; + updatedAtLte?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagRunServiceGetDagRunsKeyFn( + { + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }, + queryKey, + ), + queryFn: () => + DagRunService.getDagRuns({ + dagId, + endDateGte, + endDateLte, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + runAfterGte, + runAfterLte, + runType, + startDateGte, + startDateLte, + state, + updatedAtGte, + updatedAtLte, + }) as TData, + ...options, + }); +/** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. + * @param data.dagId + * @param data.versionNumber + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ +export const useDagSourceServiceGetDagSourceSuspense = < + TData = Common.DagSourceServiceGetDagSourceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + dagId, + versionNumber, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + dagId: string; + versionNumber?: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ accept, dagId, versionNumber }, queryKey), + queryFn: () => DagSourceService.getDagSource({ accept, dagId, versionNumber }) as TData, + ...options, + }); +/** + * Get Dag Stats + * Get Dag statistics. + * @param data The data for the request. 
+ * @param data.dagIds + * @returns DagStatsCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagStatsServiceGetDagStatsSuspense = < + TData = Common.DagStatsServiceGetDagStatsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagIds, + }: { + dagIds?: string[]; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagStatsServiceGetDagStatsKeyFn({ dagIds }, queryKey), + queryFn: () => DagStatsService.getDagStats({ dagIds }) as TData, + ...options, + }); +/** + * Get Dag Reports + * Get DAG report. + * @param data The data for the request. + * @param data.subdir + * @returns unknown Successful Response + * @throws ApiError + */ +export const useDagReportServiceGetDagReportsSuspense = < + TData = Common.DagReportServiceGetDagReportsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + subdir, + }: { + subdir: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagReportServiceGetDagReportsKeyFn({ subdir }, queryKey), + queryFn: () => DagReportService.getDagReports({ subdir }) as TData, + ...options, + }); +/** + * Get Config + * @param data The data for the request. 
+ * @param data.section + * @param data.accept + * @returns Config Successful Response + * @throws ApiError + */ +export const useConfigServiceGetConfigSuspense = < + TData = Common.ConfigServiceGetConfigDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + section, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + section?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseConfigServiceGetConfigKeyFn({ accept, section }, queryKey), + queryFn: () => ConfigService.getConfig({ accept, section }) as TData, + ...options, + }); +/** + * Get Config Value + * @param data The data for the request. + * @param data.section + * @param data.option + * @param data.accept + * @returns Config Successful Response + * @throws ApiError + */ +export const useConfigServiceGetConfigValueSuspense = < + TData = Common.ConfigServiceGetConfigValueDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + option, + section, + }: { + accept?: "application/json" | "text/plain" | "*/*"; + option: string; + section: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseConfigServiceGetConfigValueKeyFn({ accept, option, section }, queryKey), + queryFn: () => ConfigService.getConfigValue({ accept, option, section }) as TData, + ...options, + }); +/** + * Get Configs + * Get configs for UI. 
+ * @returns ConfigResponse Successful Response + * @throws ApiError + */ +export const useConfigServiceGetConfigsSuspense = < + TData = Common.ConfigServiceGetConfigsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseConfigServiceGetConfigsKeyFn(queryKey), + queryFn: () => ConfigService.getConfigs() as TData, + ...options, + }); +/** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. + * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagWarningServiceListDagWarningsSuspense = < + TData = Common.DagWarningServiceListDagWarningsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn( + { dagId, limit, offset, orderBy, warningType }, + queryKey, + ), + queryFn: () => DagWarningService.listDagWarnings({ dagId, limit, offset, orderBy, warningType }) as TData, + ...options, + }); +/** + * Get Dags + * Get all DAGs. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.excludeStale + * @param data.paused + * @param data.lastDagRunState + * @param data.dagRunStartDateGte + * @param data.dagRunStartDateLte + * @param data.dagRunEndDateGte + * @param data.dagRunEndDateLte + * @param data.dagRunState + * @param data.orderBy + * @returns DAGCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDagsSuspense = < + TData = Common.DagServiceGetDagsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunEndDateGte?: string; + dagRunEndDateLte?: string; + dagRunStartDateGte?: string; + dagRunStartDateLte?: string; + dagRunState?: string[]; + excludeStale?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + orderBy?: string; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagServiceGetDagsKeyFn( + { + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }, + queryKey, + ), + queryFn: () => + DagService.getDags({ + dagDisplayNamePattern, + dagIdPattern, + dagRunEndDateGte, + dagRunEndDateLte, + dagRunStartDateGte, + dagRunStartDateLte, + dagRunState, + excludeStale, + 
lastDagRunState, + limit, + offset, + orderBy, + owners, + paused, + tags, + tagsMatchMode, + }) as TData, + ...options, + }); +/** + * Get Dag + * Get basic information about a DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDagSuspense = < + TData = Common.DagServiceGetDagDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + }: { + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagServiceGetDagKeyFn({ dagId }, queryKey), + queryFn: () => DagService.getDag({ dagId }) as TData, + ...options, + }); +/** + * Get Dag Details + * Get details of DAG. + * @param data The data for the request. + * @param data.dagId + * @returns DAGDetailsResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDagDetailsSuspense = < + TData = Common.DagServiceGetDagDetailsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + }: { + dagId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagServiceGetDagDetailsKeyFn({ dagId }, queryKey), + queryFn: () => DagService.getDagDetails({ dagId }) as TData, + ...options, + }); +/** + * Get Dag Tags + * Get all DAG tags. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.tagNamePattern + * @returns DAGTagCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDagTagsSuspense = < + TData = Common.DagServiceGetDagTagsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + tagNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + tagNamePattern?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagServiceGetDagTagsKeyFn({ limit, offset, orderBy, tagNamePattern }, queryKey), + queryFn: () => DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }) as TData, + ...options, + }); +/** + * Recent Dag Runs + * Get recent DAG runs. + * @param data The data for the request. + * @param data.dagRunsLimit + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.tagsMatchMode + * @param data.owners + * @param data.dagIds + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.excludeStale + * @param data.paused + * @param data.lastDagRunState + * @returns DAGWithLatestDagRunsCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagServiceRecentDagRunsSuspense = < + TData = Common.DagServiceRecentDagRunsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagIds?: string[]; + dagRunsLimit?: number; + excludeStale?: boolean; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + owners?: string[]; + paused?: boolean; + tags?: string[]; + tagsMatchMode?: "any" | "all"; + } = {}, + queryKey?: 
TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagServiceRecentDagRunsKeyFn( + { + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }, + queryKey, + ), + queryFn: () => + DagService.recentDagRuns({ + dagDisplayNamePattern, + dagIdPattern, + dagIds, + dagRunsLimit, + excludeStale, + lastDagRunState, + limit, + offset, + owners, + paused, + tags, + tagsMatchMode, + }) as TData, + ...options, + }); +/** + * Get Event Log + * @param data The data for the request. + * @param data.eventLogId + * @returns EventLogResponse Successful Response + * @throws ApiError + */ +export const useEventLogServiceGetEventLogSuspense = < + TData = Common.EventLogServiceGetEventLogDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + eventLogId, + }: { + eventLogId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }, queryKey), + queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, + ...options, + }); +/** + * Get Event Logs + * Get all Event Logs. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.dagId + * @param data.taskId + * @param data.runId + * @param data.mapIndex + * @param data.tryNumber + * @param data.owner + * @param data.event + * @param data.excludedEvents + * @param data.includedEvents + * @param data.before + * @param data.after + * @returns EventLogCollectionResponse Successful Response + * @throws ApiError + */ +export const useEventLogServiceGetEventLogsSuspense = < + TData = Common.EventLogServiceGetEventLogsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }: { + after?: string; + before?: string; + dagId?: string; + event?: string; + excludedEvents?: string[]; + includedEvents?: string[]; + limit?: number; + mapIndex?: number; + offset?: number; + orderBy?: string; + owner?: string; + runId?: string; + taskId?: string; + tryNumber?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseEventLogServiceGetEventLogsKeyFn( + { + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }, + queryKey, + ), + queryFn: () => + EventLogService.getEventLogs({ + after, + before, + dagId, + event, + excludedEvents, + includedEvents, + limit, + mapIndex, + offset, + orderBy, + owner, + runId, + taskId, + tryNumber, + }) as TData, + ...options, + }); +/** + * Get Extra Links + * Get extra links for task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns ExtraLinkCollectionResponse Successful Response + * @throws ApiError + */ +export const useExtraLinksServiceGetExtraLinksSuspense = < + TData = Common.ExtraLinksServiceGetExtraLinksDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseExtraLinksServiceGetExtraLinksKeyFn({ dagId, dagRunId, mapIndex, taskId }, queryKey), + queryFn: () => ExtraLinksService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Extra Links + * Get extra links for task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns ExtraLinkCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetExtraLinksSuspense = < + TData = Common.TaskInstanceServiceGetExtraLinksDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetExtraLinksKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => TaskInstanceService.getExtraLinks({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Task Instance + * Get task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceSuspense = < + TData = Common.TaskInstanceServiceGetTaskInstanceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + taskId, + }: { + dagId: string; + dagRunId: string; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ dagId, dagRunId, taskId }, queryKey), + queryFn: () => TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) as TData, + ...options, + }); +/** + * Get Mapped Task Instances + * Get list of mapped task instances. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.durationGte + * @param data.durationLte + * @param data.state + * @param data.pool + * @param data.queue + * @param data.executor + * @param data.versionNumber + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstancesSuspense = < + TData = Common.TaskInstanceServiceGetMappedTaskInstancesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + 
startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskId: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstancesKeyFn( + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getMappedTaskInstances({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }) as TData, + ...options, + }); +/** + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexSuspense = < + TData = Common.TaskInstanceServiceGetTaskInstanceDependenciesByMapIndexDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesByMapIndexKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstanceDependenciesByMapIndex({ + dagId, + dagRunId, + mapIndex, + taskId, + }) as TData, + ...options, + }); +/** + * Get Task Instance Dependencies + * Get dependencies blocking task from getting scheduled. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskDependencyCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceDependenciesSuspense = < + TData = Common.TaskInstanceServiceGetTaskInstanceDependenciesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceDependenciesKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstanceDependencies({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Task Instance Tries + * Get list of task instances history. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceTriesSuspense = < + TData = Common.TaskInstanceServiceGetTaskInstanceTriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTriesKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => TaskInstanceService.getTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Mapped Task Instance Tries + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceHistoryCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstanceTriesSuspense = < + TData = Common.TaskInstanceServiceGetMappedTaskInstanceTriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTriesKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getMappedTaskInstanceTries({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Mapped Task Instance + * Get task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstanceSuspense = < + TData = Common.TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => TaskInstanceService.getMappedTaskInstance({ dagId, dagRunId, mapIndex, taskId }) as TData, + ...options, + }); +/** + * Get Task Instances + * Get list of task instances. 
+ * + * This endpoint allows specifying `~` as the dag_id, dag_run_id to retrieve Task Instances for all DAGs + * and DAG runs. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.updatedAtGte + * @param data.updatedAtLte + * @param data.durationGte + * @param data.durationLte + * @param data.taskDisplayNamePattern + * @param data.state + * @param data.pool + * @param data.queue + * @param data.executor + * @param data.versionNumber + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns TaskInstanceCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstancesSuspense = < + TData = Common.TaskInstanceServiceGetTaskInstancesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }: { + dagId: string; + dagRunId: string; + durationGte?: number; + durationLte?: number; + endDateGte?: string; + endDateLte?: string; + executor?: string[]; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + pool?: string[]; + queue?: string[]; + runAfterGte?: string; + runAfterLte?: string; + startDateGte?: string; + startDateLte?: string; + state?: string[]; + taskDisplayNamePattern?: string; + taskId?: string; + updatedAtGte?: string; + updatedAtLte?: string; + versionNumber?: number[]; + }, 
+ queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstancesKeyFn( + { + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstances({ + dagId, + dagRunId, + durationGte, + durationLte, + endDateGte, + endDateLte, + executor, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + pool, + queue, + runAfterGte, + runAfterLte, + startDateGte, + startDateLte, + state, + taskDisplayNamePattern, + taskId, + updatedAtGte, + updatedAtLte, + versionNumber, + }) as TData, + ...options, + }); +/** + * Get Task Instance Try Details + * Get task instance details by try number. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.taskTryNumber + * @param data.mapIndex + * @returns TaskInstanceHistoryResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceTryDetailsSuspense = < + TData = Common.TaskInstanceServiceGetTaskInstanceTryDetailsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }: { + dagId: string; + dagRunId: string; + mapIndex?: number; + taskId: string; + taskTryNumber: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceTryDetailsKeyFn( + { dagId, dagRunId, mapIndex, taskId, taskTryNumber }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstanceTryDetails({ + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }) as TData, + ...options, + }); +/** + * Get Mapped Task Instance Try Details + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.taskTryNumber + * @param data.mapIndex + * @returns TaskInstanceHistoryResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstanceTryDetailsSuspense = < + TData = Common.TaskInstanceServiceGetMappedTaskInstanceTryDetailsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + taskTryNumber: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceTryDetailsKeyFn( + { dagId, dagRunId, mapIndex, taskId, taskTryNumber }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getMappedTaskInstanceTryDetails({ + dagId, + dagRunId, + mapIndex, + taskId, + taskTryNumber, + }) as TData, + ...options, + }); +/** + * Get Log + * Get logs for a specific task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.tryNumber + * @param data.fullContent + * @param data.mapIndex + * @param data.token + * @param data.accept + * @returns TaskInstancesLogResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetLogSuspense = < + TData = Common.TaskInstanceServiceGetLogDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }: { + accept?: "application/json" | "*/*" | "application/x-ndjson"; + dagId: string; + dagRunId: string; + fullContent?: boolean; + mapIndex?: number; + taskId: string; + token?: string; + tryNumber: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskInstanceServiceGetLogKeyFn( + { accept, dagId, dagRunId, fullContent, mapIndex, taskId, token, tryNumber }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getLog({ + accept, + dagId, + dagRunId, + fullContent, + mapIndex, + taskId, + token, + tryNumber, + }) as TData, + ...options, + }); +/** + * Get Import Error + * Get an import error. + * @param data The data for the request. + * @param data.importErrorId + * @returns ImportErrorResponse Successful Response + * @throws ApiError + */ +export const useImportErrorServiceGetImportErrorSuspense = < + TData = Common.ImportErrorServiceGetImportErrorDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + importErrorId, + }: { + importErrorId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorKeyFn({ importErrorId }, queryKey), + queryFn: () => ImportErrorService.getImportError({ importErrorId }) as TData, + ...options, + }); +/** + * Get Import Errors + * Get all import errors. 
+ * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns ImportErrorCollectionResponse Successful Response + * @throws ApiError + */ +export const useImportErrorServiceGetImportErrorsSuspense = < + TData = Common.ImportErrorServiceGetImportErrorsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseImportErrorServiceGetImportErrorsKeyFn({ limit, offset, orderBy }, queryKey), + queryFn: () => ImportErrorService.getImportErrors({ limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Jobs + * Get all jobs. + * @param data The data for the request. + * @param data.isAlive + * @param data.startDateGte + * @param data.startDateLte + * @param data.endDateGte + * @param data.endDateLte + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.jobState + * @param data.jobType + * @param data.hostname + * @param data.executorClass + * @returns JobCollectionResponse Successful Response + * @throws ApiError + */ +export const useJobServiceGetJobsSuspense = < + TData = Common.JobServiceGetJobsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }: { + endDateGte?: string; + endDateLte?: string; + executorClass?: string; + hostname?: string; + isAlive?: boolean; + jobState?: string; + jobType?: string; + limit?: number; + offset?: number; + orderBy?: string; + startDateGte?: string; + startDateLte?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: 
Common.UseJobServiceGetJobsKeyFn( + { + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }, + queryKey, + ), + queryFn: () => + JobService.getJobs({ + endDateGte, + endDateLte, + executorClass, + hostname, + isAlive, + jobState, + jobType, + limit, + offset, + orderBy, + startDateGte, + startDateLte, + }) as TData, + ...options, + }); +/** + * Get Plugins + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ +export const usePluginServiceGetPluginsSuspense = < + TData = Common.PluginServiceGetPluginsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }, queryKey), + queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, + ...options, + }); +/** + * Get Pool + * Get a pool. + * @param data The data for the request. + * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceGetPoolSuspense = < + TData = Common.PoolServiceGetPoolDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + poolName, + }: { + poolName: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), + queryFn: () => PoolService.getPool({ poolName }) as TData, + ...options, + }); +/** + * Get Pools + * Get all pools entries. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.poolNamePattern + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceGetPoolsSuspense = < + TData = Common.PoolServiceGetPoolsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + poolNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + poolNamePattern?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy, poolNamePattern }, queryKey), + queryFn: () => PoolService.getPools({ limit, offset, orderBy, poolNamePattern }) as TData, + ...options, + }); +/** + * Get Providers + * Get providers. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ +export const useProviderServiceGetProvidersSuspense = < + TData = Common.ProviderServiceGetProvidersDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }, queryKey), + queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, + ...options, + }); +/** + * Get Xcom Entry + * Get an XCom entry. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.taskId + * @param data.dagRunId + * @param data.xcomKey + * @param data.mapIndex + * @param data.deserialize + * @param data.stringify + * @returns unknown Successful Response + * @throws ApiError + */ +export const useXcomServiceGetXcomEntrySuspense = < + TData = Common.XcomServiceGetXcomEntryDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + deserialize?: boolean; + mapIndex?: number; + stringify?: boolean; + taskId: string; + xcomKey: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseXcomServiceGetXcomEntryKeyFn( + { dagId, dagRunId, deserialize, mapIndex, stringify, taskId, xcomKey }, + queryKey, + ), + queryFn: () => + XcomService.getXcomEntry({ + dagId, + dagRunId, + deserialize, + mapIndex, + stringify, + taskId, + xcomKey, + }) as TData, + ...options, + }); +/** + * Get Xcom Entries + * Get all XCom entries. + * + * This endpoint allows specifying `~` as the dag_id, dag_run_id, task_id to retrieve XCom entries for all DAGs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.xcomKey + * @param data.mapIndex + * @param data.limit + * @param data.offset + * @returns XComCollectionResponse Successful Response + * @throws ApiError + */ +export const useXcomServiceGetXcomEntriesSuspense = < + TData = Common.XcomServiceGetXcomEntriesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + limit, + mapIndex, + offset, + taskId, + xcomKey, + }: { + dagId: string; + dagRunId: string; + limit?: number; + mapIndex?: number; + offset?: number; + taskId: string; + xcomKey?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseXcomServiceGetXcomEntriesKeyFn( + { dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }, + queryKey, + ), + queryFn: () => + XcomService.getXcomEntries({ dagId, dagRunId, limit, mapIndex, offset, taskId, xcomKey }) as TData, + ...options, + }); +/** + * Get Tasks + * Get tasks for DAG. + * @param data The data for the request. + * @param data.dagId + * @param data.orderBy + * @returns TaskCollectionResponse Successful Response + * @throws ApiError + */ +export const useTaskServiceGetTasksSuspense = < + TData = Common.TaskServiceGetTasksDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + orderBy, + }: { + dagId: string; + orderBy?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskServiceGetTasksKeyFn({ dagId, orderBy }, queryKey), + queryFn: () => TaskService.getTasks({ dagId, orderBy }) as TData, + ...options, + }); +/** + * Get Task + * Get simplified representation of a task. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.taskId + * @returns TaskResponse Successful Response + * @throws ApiError + */ +export const useTaskServiceGetTaskSuspense = < + TData = Common.TaskServiceGetTaskDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + taskId, + }: { + dagId: string; + taskId: unknown; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseTaskServiceGetTaskKeyFn({ dagId, taskId }, queryKey), + queryFn: () => TaskService.getTask({ dagId, taskId }) as TData, + ...options, + }); +/** + * Get Variable + * Get a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariableSuspense = < + TData = Common.VariableServiceGetVariableDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + variableKey, + }: { + variableKey: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseVariableServiceGetVariableKeyFn({ variableKey }, queryKey), + queryFn: () => VariableService.getVariable({ variableKey }) as TData, + ...options, + }); +/** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.variableKeyPattern + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariablesSuspense = < + TData = Common.VariableServiceGetVariablesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + variableKeyPattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + variableKeyPattern?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseVariableServiceGetVariablesKeyFn( + { limit, offset, orderBy, variableKeyPattern }, + queryKey, + ), + queryFn: () => VariableService.getVariables({ limit, offset, orderBy, variableKeyPattern }) as TData, + ...options, + }); +/** + * Get Dag Version + * Get one Dag Version. + * @param data The data for the request. + * @param data.dagId + * @param data.versionNumber + * @returns DagVersionResponse Successful Response + * @throws ApiError + */ +export const useDagVersionServiceGetDagVersionSuspense = < + TData = Common.DagVersionServiceGetDagVersionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + versionNumber, + }: { + dagId: string; + versionNumber: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagVersionServiceGetDagVersionKeyFn({ dagId, versionNumber }, queryKey), + queryFn: () => DagVersionService.getDagVersion({ dagId, versionNumber }) as TData, + ...options, + }); +/** + * Get Dag Versions + * Get all DAG Versions. + * + * This endpoint allows specifying `~` as the dag_id to retrieve DAG Versions for all DAGs. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.versionNumber + * @param data.bundleName + * @param data.bundleVersion + * @param data.orderBy + * @returns DAGVersionCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagVersionServiceGetDagVersionsSuspense = < + TData = Common.DagVersionServiceGetDagVersionsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }: { + bundleName?: string; + bundleVersion?: string; + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + versionNumber?: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagVersionServiceGetDagVersionsKeyFn( + { bundleName, bundleVersion, dagId, limit, offset, orderBy, versionNumber }, + queryKey, + ), + queryFn: () => + DagVersionService.getDagVersions({ + bundleName, + bundleVersion, + dagId, + limit, + offset, + orderBy, + versionNumber, + }) as TData, + ...options, + }); +/** + * Get Health + * @returns HealthInfoResponse Successful Response + * @throws ApiError + */ +export const useMonitorServiceGetHealthSuspense = < + TData = Common.MonitorServiceGetHealthDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), + queryFn: () => MonitorService.getHealth() as TData, + ...options, + }); +/** + * Get Version + * Get version information. 
+ * @returns VersionInfo Successful Response + * @throws ApiError + */ +export const useVersionServiceGetVersionSuspense = < + TData = Common.VersionServiceGetVersionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), + queryFn: () => VersionService.getVersion() as TData, + ...options, + }); +/** + * Login + * Redirect to the login URL depending on the AuthManager configured. + * @param data The data for the request. + * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ +export const useLoginServiceLoginSuspense = < + TData = Common.LoginServiceLoginDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + next, + }: { + next?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseLoginServiceLoginKeyFn({ next }, queryKey), + queryFn: () => LoginService.login({ next }) as TData, + ...options, + }); +/** + * Logout + * Logout the user. + * @param data The data for the request. 
+ * @param data.next + * @returns unknown Successful Response + * @throws ApiError + */ +export const useLoginServiceLogoutSuspense = < + TData = Common.LoginServiceLogoutDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + next, + }: { + next?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseLoginServiceLogoutKeyFn({ next }, queryKey), + queryFn: () => LoginService.logout({ next }) as TData, + ...options, + }); +/** + * Get Auth Menus + * @returns MenuItemCollectionResponse Successful Response + * @throws ApiError + */ +export const useAuthLinksServiceGetAuthMenusSuspense = < + TData = Common.AuthLinksServiceGetAuthMenusDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseAuthLinksServiceGetAuthMenusKeyFn(queryKey), + queryFn: () => AuthLinksService.getAuthMenus() as TData, + ...options, + }); +/** + * Get Dependencies + * Dependencies graph. + * @param data The data for the request. + * @param data.nodeId + * @returns BaseGraphResponse Successful Response + * @throws ApiError + */ +export const useDependenciesServiceGetDependenciesSuspense = < + TData = Common.DependenciesServiceGetDependenciesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + nodeId, + }: { + nodeId?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDependenciesServiceGetDependenciesKeyFn({ nodeId }, queryKey), + queryFn: () => DependenciesService.getDependencies({ nodeId }) as TData, + ...options, + }); +/** + * Historical Metrics + * Return cluster activity historical metrics. + * @param data The data for the request. 
+ * @param data.startDate + * @param data.endDate + * @returns HistoricalMetricDataResponse Successful Response + * @throws ApiError + */ +export const useDashboardServiceHistoricalMetricsSuspense = < + TData = Common.DashboardServiceHistoricalMetricsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + endDate, + startDate, + }: { + endDate?: string; + startDate: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDashboardServiceHistoricalMetricsKeyFn({ endDate, startDate }, queryKey), + queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }) as TData, + ...options, + }); +/** + * Dag Stats + * Return basic DAG stats with counts of DAGs in various states. + * @returns DashboardDagStatsResponse Successful Response + * @throws ApiError + */ +export const useDashboardServiceDagStatsSuspense = < + TData = Common.DashboardServiceDagStatsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDashboardServiceDagStatsKeyFn(queryKey), + queryFn: () => DashboardService.dagStats() as TData, + ...options, + }); +/** + * Structure Data + * Get Structure Data. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.includeUpstream + * @param data.includeDownstream + * @param data.root + * @param data.externalDependencies + * @param data.versionNumber + * @returns StructureDataResponse Successful Response + * @throws ApiError + */ +export const useStructureServiceStructureDataSuspense = < + TData = Common.StructureServiceStructureDataDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }: { + dagId: string; + externalDependencies?: boolean; + includeDownstream?: boolean; + includeUpstream?: boolean; + root?: string; + versionNumber?: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseStructureServiceStructureDataKeyFn( + { dagId, externalDependencies, includeDownstream, includeUpstream, root, versionNumber }, + queryKey, + ), + queryFn: () => + StructureService.structureData({ + dagId, + externalDependencies, + includeDownstream, + includeUpstream, + root, + versionNumber, + }) as TData, + ...options, + }); +/** + * Grid Data + * Return grid data. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.includeUpstream + * @param data.includeDownstream + * @param data.root + * @param data.offset + * @param data.runType + * @param data.state + * @param data.limit + * @param data.orderBy + * @param data.runAfterGte + * @param data.runAfterLte + * @param data.logicalDateGte + * @param data.logicalDateLte + * @returns GridResponse Successful Response + * @throws ApiError + */ +export const useGridServiceGridDataSuspense = < + TData = Common.GridServiceGridDataDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }: { + dagId: string; + includeDownstream?: boolean; + includeUpstream?: boolean; + limit?: number; + logicalDateGte?: string; + logicalDateLte?: string; + offset?: number; + orderBy?: string; + root?: string; + runAfterGte?: string; + runAfterLte?: string; + runType?: string[]; + state?: string[]; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseGridServiceGridDataKeyFn( + { + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }, + queryKey, + ), + queryFn: () => + GridService.gridData({ + dagId, + includeDownstream, + includeUpstream, + limit, + logicalDateGte, + logicalDateLte, + offset, + orderBy, + root, + runAfterGte, + runAfterLte, + runType, + state, + }) as TData, + ...options, + }); diff --git a/contributing-docs/testing/integration_tests.rst b/contributing-docs/testing/integration_tests.rst index 3be9f60a17a65..bc7e9c70f1d32 100644 --- a/contributing-docs/testing/integration_tests.rst +++ b/contributing-docs/testing/integration_tests.rst @@ -64,6 +64,8 @@ core or provider type of test. 
+--------------+-------------------------------------------------------+ | drill | Integration required for drill operator and hook. | +--------------+-------------------------------------------------------+ +| gremlin | Integration required for gremlin operator and hook. | ++--------------+-------------------------------------------------------+ | kafka | Integration required for Kafka hooks. | +--------------+-------------------------------------------------------+ | kerberos | Integration that provides Kerberos authentication. | diff --git a/dev/breeze/doc/01_installation.rst b/dev/breeze/doc/01_installation.rst index 08b567c0d4a7f..9b54ed17d7542 100644 --- a/dev/breeze/doc/01_installation.rst +++ b/dev/breeze/doc/01_installation.rst @@ -157,6 +157,17 @@ We highly recommend using ``uv`` to manage your Python environments, as it is ve easy to use, it is faster than any of the other tools availables (way faster!) and has a lot of features that make it easier to work with Python. +The ``gh`` cli needed for release managers +------------------------------------------ + +The ``gh`` GitHub CLI is a command line tool that allows you to interact with GitHub repositories, issues, pull +requests, and more. It is useful for release managers to automate tasks such as creating releases, +managing issues, and starting workflows (for example during documentation building). Release +managers should have ``gh`` installed (see `gh installation guide <https://cli.github.com/>`_) and they +should follow configuration steps to authorize ``gh`` in their local airflow repository (basically +running ``gh auth login`` command and following the instructions). + + Alternative: pipx tool ---------------------- diff --git a/dev/breeze/doc/02_customizing.rst b/dev/breeze/doc/02_customizing.rst index 6093201547235..315c749e11e58 100644 --- a/dev/breeze/doc/02_customizing.rst +++ b/dev/breeze/doc/02_customizing.rst @@ -93,6 +93,12 @@ that you may find helpful. 
bind -T root C-S-Up select-pane -U bind -T root C-S-Down select-pane -D + # quickly disable mouse + zoom pane (for easy copying) + bind c run-shell "tmux setw mouse off" \; resize-pane -Z \; display-message "🚫 Mouse disabled & pane zoomed" + + # quickly re-enable mouse + unzoom pane (restore normal behavior) + bind v run-shell "tmux setw mouse on" \; resize-pane -Z \; display-message "🖱️ Mouse enabled & pane unzoomed" + Some helpful commands: - ``ctrl-b + z``: zoom into selected pane @@ -106,6 +112,10 @@ To copy an entire pane: - extend selection to end: ``G`` - copy and clear selection: ``enter`` +.. tip:: + + You can add the custom bindings (like ``bind c`` and ``bind v``) directly to ``files/airflow-breeze-config/.tmux.conf``. + This way they will be automatically loaded when you start a Breeze tmux session. Additional tools in Breeze container ------------------------------------ @@ -143,12 +153,12 @@ When Breeze starts, it can start additional integrations. Those are additional d that are started in the same docker-compose command. Those are required by some of the tests as described in ``_. -By default Breeze starts only airflow container without any integration enabled. If you selected +By default Breeze starts only Airflow container without any integration enabled. If you selected ``postgres`` or ``mysql`` backend, the container for the selected backend is also started (but only the one that is selected). You can start the additional integrations by passing ``--integration`` flag with appropriate integration name when starting Breeze. You can specify several ``--integration`` flags to start more than one integration at a time. -Finally you can specify ``--integration all-testable`` to start all testable integrations and +Finally, you can specify ``--integration all-testable`` to start all testable integrations and ``--integration all`` to enable all integrations. 
Once integration is started, it will continue to run until the environment is stopped with diff --git a/dev/breeze/doc/03_developer_tasks.rst b/dev/breeze/doc/03_developer_tasks.rst index 975b7adfd5ac6..9bc70819903c0 100644 --- a/dev/breeze/doc/03_developer_tasks.rst +++ b/dev/breeze/doc/03_developer_tasks.rst @@ -112,7 +112,7 @@ When you run Airflow Breeze, the following ports are automatically forwarded: .. code-block:: * 12322 -> forwarded to Airflow ssh server -> airflow:22 - * 28080 -> forwarded to Airflow API server or webserver -> airflow:8080 + * 28080 -> forwarded to Airflow API server -> airflow:8080 * 25555 -> forwarded to Flower dashboard -> airflow:5555 * 25433 -> forwarded to Postgres database -> postgres:5432 * 23306 -> forwarded to MySQL database -> mysql:3306 @@ -124,20 +124,23 @@ You can connect to these ports/databases using: .. code-block:: * ssh connection for remote debugging: ssh -p 12322 airflow@localhost pw: airflow - * API server or webserver: http://localhost:28080 + * API server: http://localhost:28080 * Flower: http://localhost:25555 * Postgres: jdbc:postgresql://localhost:25433/airflow?user=postgres&password=airflow * Mysql: jdbc:mysql://localhost:23306/airflow?user=root * Redis: redis://localhost:26379/0 If you do not use ``start-airflow`` command. You can use ``tmux`` to multiply terminals. -You may need to create a user prior to running the webserver in order to log in. +You may need to create a user prior to running the API server in order to log in. This can be done with the following command: .. code-block:: bash airflow users create --role Admin --username admin --password admin --email admin@example.com --firstname foo --lastname bar +.. note:: + ``airflow users`` command is only available when `FAB auth manager `_ is enabled. + For databases, you need to run ``airflow db reset`` at least once (or run some tests) after you started Airflow Breeze to get the database/tables created. 
You can connect to databases with IDE or any other database client: @@ -152,7 +155,7 @@ database client: You can change the used host port numbers by setting appropriate environment variables: * ``SSH_PORT`` -* ``WEB_HOST_PORT`` - API server for Airflow 3, or webserver port for Airflow 2 when --use-airflow-version is used +* ``WEB_HOST_PORT`` - API server when --use-airflow-version is used * ``POSTGRES_HOST_PORT`` * ``MYSQL_HOST_PORT`` * ``MSSQL_HOST_PORT`` @@ -221,6 +224,10 @@ short ``provider id`` (might be multiple of them). breeze build-docs +To build documentation for Task SDK package, use the below command +.. code-block:: bash + breeze build-docs task-sdk + or you can use package filter. The filters are glob pattern matching full package names and can be used to select more than one package with single filter. @@ -352,7 +359,7 @@ For testing Airflow you often want to start multiple components (in multiple ter built-in ``start-airflow`` command that start breeze container, launches multiple terminals using tmux and launches all Airflow necessary components in those terminals. -When you are starting airflow from local sources, www asset compilation is automatically executed before. +When you are starting Airflow from local sources, www asset compilation is automatically executed before. .. code-block:: bash @@ -391,8 +398,8 @@ These are all available flags of ``start-airflow`` command: Launching multiple terminals in the same environment ---------------------------------------------------- -Often if you want to run full airflow in the Breeze environment you need to launch multiple terminals and -run ``airflow webserver``, ``airflow scheduler``, ``airflow worker`` in separate terminals. +Often if you want to run full Airflow in the Breeze environment you need to launch multiple terminals and +run ``airflow api-server``, ``airflow scheduler``, ``airflow worker`` in separate terminals. 
This can be achieved either via ``tmux`` or via exec-ing into the running container from the host. Tmux is installed inside the container and you can launch it with ``tmux`` command. Tmux provides you with the @@ -417,8 +424,8 @@ These are all available flags of ``exec`` command: Compiling ui assets -------------------- -Airflow webserver needs to prepare www assets - compiled with node and yarn. The ``compile-ui-assets`` -command takes care about it. This is needed when you want to run webserver inside of the breeze. +Airflow API server needs to prepare www assets - compiled with node and yarn. The ``compile-ui-assets`` +command takes care about it. This is needed when you want to run API server inside of the breeze. .. image:: ./images/output_compile-ui-assets.svg :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_compile-ui-assets.svg @@ -439,7 +446,7 @@ Breeze uses docker images heavily and those images are rebuild periodically and images in docker cache. This might cause extra disk usage. Also running various docker compose commands (for example running tests with ``breeze testing core-tests``) might create additional docker networks that might prevent new networks from being created. Those networks are not removed automatically by docker-compose. -Also Breeze uses it's own cache to keep information about all images. +Also Breeze uses its own cache to keep information about all images. All those unused images, networks and cache can be removed by running ``breeze cleanup`` command. By default it will not remove the most recent images that you might need to run breeze commands, but you @@ -458,7 +465,7 @@ These are all available flags of ``cleanup`` command: Database and config volumes in Breeze ------------------------------------- -Breeze keeps data for all it's integration, database, configuration in named docker volumes. +Breeze keeps data for all its integration, database, configuration in named docker volumes. 
Those volumes are persisted until ``breeze down`` command. You can also preserve the volumes by adding flag ``--preserve-volumes`` when you run the command. Then, next time when you start Breeze, it will have the data pre-populated. @@ -494,7 +501,7 @@ Running Breeze with a StatsD Metrics Stack .......................................... You can launch an instance of Breeze pre-configured to emit StatsD metrics using -``breeze start-airflow --integration statsd``. This will launch an Airflow webserver +``breeze start-airflow --integration statsd``. This will launch an Airflow API server within the Breeze environment as well as containers running StatsD, Prometheus, and Grafana. The integration configures the "Targets" in Prometheus, the "Datasources" in Grafana, and includes a default dashboard in Grafana. @@ -547,9 +554,9 @@ Running Breeze with OpenLineage ............................... You can launch an instance of Breeze pre-configured to emit OpenLineage metrics using -``breeze start-airflow --integration openlineage``. This will launch an Airflow webserver +``breeze start-airflow --integration openlineage``. This will launch an Airflow API server within the Breeze environment as well as containers running a [Marquez](https://marquezproject.ai/) -webserver and API server. +API server. When you run Airflow Breeze with this integration, in addition to the standard ports (See "Port Forwarding" below), the following are also automatically forwarded: diff --git a/dev/breeze/doc/05_test_commands.rst b/dev/breeze/doc/05_test_commands.rst index f7b45110ab93d..a51013e171a25 100644 --- a/dev/breeze/doc/05_test_commands.rst +++ b/dev/breeze/doc/05_test_commands.rst @@ -68,7 +68,7 @@ To run the whole test class: You can re-run the tests interactively, add extra parameters to pytest and modify the files before re-running the test to iterate over the tests. You can also add more flags when starting the ``breeze shell`` command when you run integration tests or system tests. 
Read more details about it -in the `testing doc `_ where all the test types and information on how to run them are explained. +in the `testing doc `_ where all the test types and information on how to run them are explained. This applies to all kind of tests - all our tests can be run using pytest. @@ -159,7 +159,7 @@ Here is the detailed set of options for the ``breeze testing providers-test`` co Using ``breeze testing task-sdk-tests`` command ............................................... -The ``breeze testing task-sdk-tests`` command is allows you to run tests for Task SDK without +The ``breeze testing task-sdk-tests`` command allows you to run tests for Task SDK without initializing database. The Task SDK should not need database to be started so this acts as a good check to see if the Task SDK tests are working properly. @@ -241,7 +241,7 @@ Here is the detailed set of options for the ``breeze testing providers-integrati Running Python API client tests ............................... -To run Python API client tests, you need to have airflow python client packaged in dist folder. +To run Python API client tests, you need to have Airflow python client packaged in dist folder. To package the client, clone the airflow-python-client repository and run the following command: .. code-block:: bash @@ -327,7 +327,7 @@ automatically to run the tests. You can: * Setup environment for k8s tests with ``breeze k8s setup-env`` -* Build airflow k8S images with ``breeze k8s build-k8s-image`` +* Build Airflow k8S images with ``breeze k8s build-k8s-image`` * Manage KinD Kubernetes cluster and upload image and deploy Airflow to KinD cluster via ``breeze k8s create-cluster``, ``breeze k8s configure-cluster``, ``breeze k8s deploy-airflow``, ``breeze k8s status``, ``breeze k8s upload-k8s-image``, ``breeze k8s delete-cluster`` commands @@ -398,7 +398,7 @@ Building Airflow K8s images ........................... 
Before deploying Airflow Helm Chart, you need to make sure the appropriate Airflow image is build (it has -embedded test dags, pod templates and webserver is configured to refresh immediately. This can +embedded test dags, pod templates and api-server is configured to refresh immediately. This can be done via ``breeze k8s build-k8s-image`` command. It can also be done in parallel for all images via ``--run-in-parallel`` flag. @@ -412,7 +412,7 @@ All parameters of the command are here: Uploading Airflow K8s images ............................ -The K8S airflow images need to be uploaded to the KinD cluster. This can be done via +The K8S Airflow images need to be uploaded to the KinD cluster. This can be done via ``breeze k8s upload-k8s-image`` command. It can also be done in parallel for all images via ``--run-in-parallel`` flag. @@ -442,7 +442,7 @@ Deploying Airflow to the Cluster Airflow can be deployed to the Cluster with ``breeze k8s deploy-airflow``. This step will automatically (unless disabled by switches) will rebuild the image to be deployed. It also uses the latest version -of the Airflow Helm Chart to deploy it. You can also choose to upgrade existing airflow deployment +of the Airflow Helm Chart to deploy it. You can also choose to upgrade existing Airflow deployment and pass extra arguments to ``helm install`` or ``helm upgrade`` commands that are used to deploy airflow. By passing ``--run-in-parallel`` the deployment can be run for all clusters in parallel. @@ -457,7 +457,7 @@ All parameters of the command are here: Checking status of the K8S cluster .................................. -You can delete kubernetes cluster and airflow deployed in the current cluster +You can delete kubernetes cluster and Airflow deployed in the current cluster via ``breeze k8s status`` command. It can be also checked for all clusters created so far by passing ``--all`` flag. @@ -517,7 +517,7 @@ Running k8s complete tests .......................... 
You can run ``breeze k8s run-complete-tests`` command to combine all previous steps in one command. That -command will create cluster, deploy airflow and run tests and finally delete cluster. It is used in CI +command will create cluster, deploy Airflow and run tests and finally delete cluster. It is used in CI to run the whole chains in parallel. Run all tests: @@ -575,7 +575,7 @@ as executor you use, similar to: The shell automatically activates the virtual environment that has all appropriate dependencies installed and you can interactively run all k8s tests with pytest command (of course the cluster need to -be created and airflow deployed to it before running the tests): +be created and Airflow deployed to it before running the tests): .. code-block:: bash diff --git a/dev/breeze/doc/06_managing_docker_images.rst b/dev/breeze/doc/06_managing_docker_images.rst index ac0b0e6b1f61e..d743a7ef7bc17 100644 --- a/dev/breeze/doc/06_managing_docker_images.rst +++ b/dev/breeze/doc/06_managing_docker_images.rst @@ -196,7 +196,7 @@ but here typical examples are presented: breeze prod-image build --additional-airflow-extras "jira" -This installs additional ``jira`` extra while installing airflow in the image. +This installs additional ``jira`` extra while installing Airflow in the image. .. code-block:: bash diff --git a/dev/breeze/doc/10_advanced_breeze_topics.rst b/dev/breeze/doc/10_advanced_breeze_topics.rst index 1f770410e8969..3cd22855db2fe 100644 --- a/dev/breeze/doc/10_advanced_breeze_topics.rst +++ b/dev/breeze/doc/10_advanced_breeze_topics.rst @@ -117,7 +117,7 @@ which will be mapped to ``/files`` in your Docker container. You can pass there configure and run Docker. They will not be removed between Docker runs. By default ``/files/dags`` folder is mounted from your local ``/files/dags`` and this is -the directory used by airflow scheduler and webserver to scan dags for. 
You can use it to test your dags +the directory used by Airflow scheduler and api-server to scan dags for. You can use it to test your dags from local sources in Airflow. If you wish to add local DAGs that can be run by Breeze. The ``/files/airflow-breeze-config`` folder contains configuration files that might be used to @@ -132,7 +132,7 @@ There are couple of things you might want to do when adding/changing dependencie Breeze. You can add dependencies temporarily (which will last until you exit Breeze shell), or you might want to add them permanently (which require you to rebuild the image). Also there are different things you need to do when you are adding system level (debian) level, Python (pip) dependencies or Node (yarn) -dependencies for the webserver. +dependencies for the api-server. Python dependencies ................... diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md index c651558fc18d8..52fa5cacf83ea 100644 --- a/dev/breeze/doc/ci/02_images.md +++ b/dev/breeze/doc/ci/02_images.md @@ -443,7 +443,7 @@ can be used for CI images: | `ADDITIONAL_DEV_APT_DEPS` | | Additional apt dev dependencies installed in the first part of the image | | `ADDITIONAL_DEV_APT_ENV` | | Additional env variables defined when installing dev deps | | `AIRFLOW_PIP_VERSION` | `25.1.1` | `pip` version used. | -| `AIRFLOW_UV_VERSION` | `0.7.8` | `uv` version used. | +| `AIRFLOW_UV_VERSION` | `0.7.14` | `uv` version used. | | `AIRFLOW_PRE_COMMIT_VERSION` | `4.2.0` | `pre-commit` version used. | | `AIRFLOW_PRE_COMMIT_UV_VERSION` | `4.1.4` | `pre-commit-uv` version used. | | `AIRFLOW_USE_UV` | `true` | Whether to use UV for installation. 
| diff --git a/dev/breeze/doc/images/output-commands.svg b/dev/breeze/doc/images/output-commands.svg index 030caaeb7d5b2..234bfa5ee81a3 100644 --- a/dev/breeze/doc/images/output-commands.svg +++ b/dev/breeze/doc/images/output-commands.svg @@ -326,7 +326,7 @@ --integrationCore Integrations to enable when running (can be more   than one).                                              (all | all-testable | cassandra | celery | drill |      -kafka | kerberos | keycloak | mongo | mssql |           +gremlin | kafka | kerberos | keycloak | mongo | mssql | openlineage | otel | pinot | qdrant | redis | statsd |  trino | ydb)                                            --standalone-dag-processor/--no-standalone-dag-processoRun standalone dag processor for start-airflow          diff --git a/dev/breeze/doc/images/output_build-docs.svg b/dev/breeze/doc/images/output_build-docs.svg index f433121431045..6dec64b87277a 100644 --- a/dev/breeze/doc/images/output_build-docs.svg +++ b/dev/breeze/doc/images/output_build-docs.svg @@ -1,4 +1,4 @@ - + influxdb | jdbc | jenkins | microsoft.azure | microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql |   neo4j | odbc | openai | openfaas | openlineage | opensearch | opsgenie | oracle | pagerduty | papermill | pgvector |   pinecone | postgres | presto | qdrant | redis | salesforce | samba | segment | sendgrid | sftp | singularity | slack | -smtp | snowflake | sqlite | ssh | standard | tableau | telegram | teradata | trino | vertica | weaviate | yandex | ydb -zendesk]...                                                                                                          +smtp | snowflake | sqlite | ssh | standard | tableau | task-sdk | telegram | teradata | trino | vertica | weaviate |   +yandex | ydb | zendesk]...                                                                                             Build documents. 
-╭─ Doc flags ──────────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---docs-only-dOnly build documentation. ---spellcheck-only-sOnly run spell checking. ---clean-buildClean inventories of Inter-Sphinx documentation and generated APIs and sphinx     -artifacts before the build - useful for a clean build.                            ---one-pass-onlyBuilds documentation in one pass only. This is useful for debugging sphinx        -errors.                                                                           ---package-filterFilter(s) to use more than one can be specified. You can use glob pattern         -matching the full package name, for example `apache-airflow-providers-*`. Useful  -when you want to selectseveral similarly named packages together.                 -(TEXT)                                                                            ---include-not-ready-providersWhether to include providers that are not yet ready to be released. ---include-removed-providersWhether to include providers that are removed. ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] ---builderBuildx builder used to perform `docker buildx build` commands.(TEXT) -[default: autodetect]                                          ---distributions-listOptional, contains comma-separated list of package ids that are processed for     -documentation building, and document publishing. It is an easier alternative to   -adding individual packages as arguments to every command. This overrides the      -packages passed as arguments.                                                     
-(TEXT)                                                                            -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---verbose-vPrint verbose information about performed steps. ---answer-aForce answer to questions.(y | n | q | yes | no | quit) ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Build scope (default is to build docs and spellcheck) ──────────────────────────────────────────────────────────────╮ +--docs-only-dOnly build documentation. +--spellcheck-only-sOnly run spell checking. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Type of build ──────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--one-pass-onlyBuilds documentation in one pass only. This is useful for debugging sphinx errors. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Cleaning inventories ───────────────────────────────────────────────────────────────────────────────────────────────╮ +--clean-buildCleans the build directory before building the documentation and removes all        +inventory cache (including external inventories).                                   +--refresh-airflow-inventoriesWhen set, only airflow package inventories will be refreshed, regardless if they    +are already downloaded. With `--clean-build` - everything is cleaned..              
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Filtering options ──────────────────────────────────────────────────────────────────────────────────────────────────╮ +--package-filterFilter(s) to use more than one can be specified. You can use glob pattern matching  +the full package name, for example `apache-airflow-providers-*`. Useful when you    +want to selectseveral similarly named packages together.                            +(TEXT)                                                                              +--include-not-ready-providersWhether to include providers that are not yet ready to be released. +--include-removed-providersWhether to include providers that are removed. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Misc options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--include-commitsInclude commits in the documentation. +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +--builderBuildx builder used to perform `docker buildx build` commands.(TEXT) +[default: autodetect]                                          +--distributions-listOptional, contains comma-separated list of package ids that are processed for              +documentation building, and document publishing. It is an easier alternative to adding     +individual packages as arguments to every command. This overrides the packages passed as   +arguments.                                                                                 
+(TEXT)                                                                                     +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--verbose-vPrint verbose information about performed steps. +--answer-aForce answer to questions.(y | n | q | yes | no | quit) +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/dev/breeze/doc/images/output_build-docs.txt b/dev/breeze/doc/images/output_build-docs.txt index 0af15214ce8cc..187145b43d450 100644 --- a/dev/breeze/doc/images/output_build-docs.txt +++ b/dev/breeze/doc/images/output_build-docs.txt @@ -1 +1 @@ -23e0abc8bc26f17cd7e033a3b5654cb4 +baa7465caa7e65ceba2c44f9479a1158 diff --git a/dev/breeze/doc/images/output_release-management.svg b/dev/breeze/doc/images/output_release-management.svg index c880fe75bdda6..96b0ef7e1b010 100644 --- a/dev/breeze/doc/images/output_release-management.svg +++ b/dev/breeze/doc/images/output_release-management.svg @@ -1,4 +1,4 @@ - + CycloneDX SBOMs for Apache Airflow{{project_name}}{{ version }}ń +CycloneDX SBOMs for Apache Airflow{{project_name}}{{ version }}

CycloneDX SBOMs for Apache Airflow{{project_name}}{{ version }}

    diff --git a/dev/breeze/src/airflow_breeze/commands/setup_commands.py b/dev/breeze/src/airflow_breeze/commands/setup_commands.py index 9014c069eb1fe..1429a6b13f458 100644 --- a/dev/breeze/src/airflow_breeze/commands/setup_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/setup_commands.py @@ -498,7 +498,6 @@ def regenerate_help_images_for_all_commands(commands: tuple[str, ...], check_onl env["PYTHONPATH"] = str(BREEZE_SOURCES_PATH) new_hash_dict = get_command_hash_dict() regenerate_all_commands = False - regenerate_all_commands = False commands_list = list(commands) if force: console.print("[info]Force regeneration all breeze command images") diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py index cae711c31eb1a..7f88b8bffaf44 100644 --- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py @@ -28,6 +28,7 @@ from airflow_breeze.commands.ci_image_commands import rebuild_or_pull_ci_image_if_needed from airflow_breeze.commands.common_options import ( + option_allow_pre_releases, option_backend, option_clean_airflow_installation, option_core_integration, @@ -42,6 +43,7 @@ option_github_repository, option_image_name, option_include_success_outputs, + option_install_airflow_with_constraints, option_keep_env_variables, option_mount_sources, option_mysql_version, @@ -60,13 +62,13 @@ ) from airflow_breeze.commands.common_package_installation_options import ( option_airflow_constraints_reference, - option_install_airflow_with_constraints, option_providers_constraints_location, option_providers_skip_constraints, option_use_distributions_from_dist, ) from airflow_breeze.commands.release_management_commands import option_distribution_format from airflow_breeze.global_constants import ( + ALL_TEST_SUITES, ALL_TEST_TYPE, ALLOWED_TEST_TYPE_CHOICES, GroupOfTests, @@ -91,7 +93,7 @@ check_async_run_results, run_with_pool, 
) -from airflow_breeze.utils.path_utils import FILES_PATH, cleanup_python_generated_files +from airflow_breeze.utils.path_utils import AIRFLOW_CTL_ROOT_PATH, FILES_PATH, cleanup_python_generated_files from airflow_breeze.utils.run_tests import ( file_name_from_test_type, generate_args_for_pytest, @@ -516,6 +518,15 @@ def _verify_parallelism_parameters( show_default=True, type=NotVerifiedBetterChoice(ALLOWED_TEST_TYPE_CHOICES[GroupOfTests.PROVIDERS]), ) +option_test_type = click.option( + "--test-type", + help="Type for shell tests to run - used when forcing " + "lowest dependencies to determine which distribution to force lowest dependencies for", + default=ALL_TEST_TYPE, + envvar="TEST_TYPE", + show_default=True, + type=NotVerifiedBetterChoice([*ALL_TEST_SUITES.keys(), *all_selective_core_test_types()]), +) option_test_type_helm = click.option( "--test-type", help="Type of helm tests to run", @@ -609,6 +620,7 @@ def _verify_parallelism_parameters( @option_total_test_timeout @option_upgrade_boto @option_use_airflow_version +@option_allow_pre_releases @option_use_distributions_from_dist @option_use_xdist @option_verbose @@ -673,6 +685,7 @@ def core_tests(**kwargs): @option_total_test_timeout @option_upgrade_boto @option_use_airflow_version +@option_allow_pre_releases @option_use_distributions_from_dist @option_use_xdist @option_verbose @@ -705,6 +718,7 @@ def providers_tests(**kwargs): def task_sdk_tests(**kwargs): _run_test_command( test_group=GroupOfTests.TASK_SDK, + allow_pre_releases=False, airflow_constraints_reference="constraints-main", backend="none", clean_airflow_installation=False, @@ -747,54 +761,30 @@ def task_sdk_tests(**kwargs): allow_extra_args=False, ), ) -@option_collect_only -@option_dry_run -@option_enable_coverage -@option_force_sa_warnings -@option_forward_credentials -@option_github_repository -@option_keep_env_variables -@option_mount_sources +@option_parallelism @option_python -@option_skip_docker_compose_down -@option_test_timeout 
+@option_dry_run @option_verbose @click.argument("extra_pytest_args", nargs=-1, type=click.Path(path_type=str)) -def airflow_ctl_tests(**kwargs): - _run_test_command( - test_group=GroupOfTests.CTL, - airflow_constraints_reference="constraints-main", - backend="none", - clean_airflow_installation=False, - debug_resources=False, - downgrade_pendulum=False, - downgrade_sqlalchemy=False, - db_reset=False, - include_success_outputs=False, - integration=(), - install_airflow_with_constraints=False, - run_db_tests_only=False, - run_in_parallel=False, - skip_db_tests=True, - use_xdist=True, - excluded_parallel_test_types="", - excluded_providers="", - force_lowest_dependencies=False, - no_db_cleanup=True, - parallel_test_types="", - parallelism=0, - distribution_format="wheel", - providers_constraints_location="", - providers_skip_constraints=False, - skip_cleanup=False, - skip_providers="", - test_type=ALL_TEST_TYPE, - total_test_timeout=DEFAULT_TOTAL_TEST_TIMEOUT, - upgrade_boto=False, - use_airflow_version=None, - use_distributions_from_dist=False, - **kwargs, - ) +def airflow_ctl_tests(python: str, parallelism: int, extra_pytest_args: tuple): + parallelism_args = ["-n", str(parallelism)] if parallelism > 1 else [] + test_command = [ + "uv", + "run", + "--python", + python, + "pytest", + "--color=yes", + *parallelism_args, + *extra_pytest_args, + ] + result = run_command(test_command, cwd=AIRFLOW_CTL_ROOT_PATH, check=False) + if result.returncode != 0: + get_console().print( + f"[error]Airflow CTL tests failed with return code {result.returncode}.[/]\n" + f"Command: {' '.join(test_command)}\n" + ) + sys.exit(result.returncode) @group_for_testing.command( @@ -979,6 +969,14 @@ def integration_providers_tests( @option_mount_sources @option_mysql_version @option_no_db_cleanup +@option_use_airflow_version +@option_allow_pre_releases +@option_airflow_constraints_reference +@option_clean_airflow_installation +@option_force_lowest_dependencies 
+@option_install_airflow_with_constraints +@option_distribution_format +@option_use_distributions_from_dist @option_postgres_version @option_python @option_skip_docker_compose_down @@ -1002,6 +1000,14 @@ def system_tests( python: str, skip_docker_compose_down: bool, test_timeout: int, + use_airflow_version: str, + allow_pre_releases: bool, + airflow_constraints_reference: str, + clean_airflow_installation: bool, + force_lowest_dependencies: bool, + install_airflow_with_constraints: bool, + distribution_format: str, + use_distributions_from_dist: bool, ): shell_params = ShellParams( test_group=GroupOfTests.SYSTEM, @@ -1022,6 +1028,14 @@ def system_tests( force_sa_warnings=force_sa_warnings, run_tests=True, db_reset=db_reset, + use_airflow_version=use_airflow_version, + allow_pre_releases=allow_pre_releases, + airflow_constraints_reference=airflow_constraints_reference, + clean_airflow_installation=clean_airflow_installation, + force_lowest_dependencies=force_lowest_dependencies, + install_airflow_with_constraints=install_airflow_with_constraints, + distribution_format=distribution_format, + use_distributions_from_dist=use_distributions_from_dist, ) fix_ownership_using_docker() cleanup_python_generated_files() @@ -1238,6 +1252,7 @@ def _run_test_command( *, test_group: GroupOfTests, airflow_constraints_reference: str, + allow_pre_releases: bool, backend: str, collect_only: bool, clean_airflow_installation: bool, @@ -1290,6 +1305,7 @@ def _run_test_command( test_list = [test for test in test_list if test not in excluded_test_list] shell_params = ShellParams( airflow_constraints_reference=airflow_constraints_reference, + allow_pre_releases=allow_pre_releases, backend=backend, collect_only=collect_only, clean_airflow_installation=clean_airflow_installation, diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py b/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py index 47abc47492e3b..f4cfc212eeaba 100644 --- 
a/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py +++ b/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py @@ -89,6 +89,7 @@ "--install-airflow-with-constraints", "--distribution-format", "--use-airflow-version", + "--allow-pre-releases", "--use-distributions-from-dist", ], } @@ -183,16 +184,13 @@ TEST_ADVANCED_FLAGS, ], "breeze testing airflow-ctl-tests": [ - TEST_OPTIONS_NON_DB, { "name": "Test environment", "options": [ "--python", - "--forward-credentials", - "--force-sa-warnings", + "--parallelism", ], }, - TEST_ADVANCED_FLAGS, ], "breeze testing core-integration-tests": [ TEST_OPTIONS_DB, @@ -210,6 +208,7 @@ TEST_OPTIONS_DB, TEST_ENVIRONMENT_DB, TEST_ADVANCED_FLAGS, + TEST_ADVANCED_FLAGS_FOR_INSTALLATION, ], "breeze testing helm-tests": [ { diff --git a/dev/breeze/src/airflow_breeze/commands/workflow_commands.py b/dev/breeze/src/airflow_breeze/commands/workflow_commands.py index 5f1225ad93f92..0fb8c4fc5d90b 100644 --- a/dev/breeze/src/airflow_breeze/commands/workflow_commands.py +++ b/dev/breeze/src/airflow_breeze/commands/workflow_commands.py @@ -17,6 +17,7 @@ from __future__ import annotations import json +import os import re import sys @@ -51,6 +52,11 @@ def workflow_run(): help="Git reference tag to checkout to build documentation.", required=True, ) +@click.option( + "--skip-tag-validation", + help="Skip validation of the tag. Allows to use `main` or commit hash. 
Use with caution.", +    is_flag=True, +) @click.option( "--exclude-docs", help="Comma separated list of docs packages to exclude from the publish.", @@ -72,27 +78,39 @@ def workflow_run_publish( ref: str, exclude_docs: str, site_env: str, + skip_tag_validation: bool, doc_packages: tuple[str, ...], skip_write_to_stable_folder: bool = False, ): + if os.environ.get("GITHUB_TOKEN", ""): + get_console().print("\n[warning]Your authentication will use GITHUB_TOKEN environment variable.") + get_console().print( + "\nThis might not be what you want unless your token has " + "sufficient permissions to trigger workflows." + ) + get_console().print( + "If you remove GITHUB_TOKEN, workflow_run will use the authentication you already " + "set-up with `gh auth login`.\n" + ) get_console().print( f"[blue]Validating ref: {ref}[/blue]", ) - tag_result = run_command( - ["gh", "api", f"repos/apache/airflow/git/refs/tags/{ref}"], - capture_output=True, - check=False, - ) + if not skip_tag_validation: + tag_result = run_command( + ["gh", "api", f"repos/apache/airflow/git/refs/tags/{ref}"], + capture_output=True, + check=False, + ) - stdout = tag_result.stdout.decode("utf-8") - tag_respo = json.loads(stdout) + stdout = tag_result.stdout.decode("utf-8") + tag_respo = json.loads(stdout) - if not tag_respo.get("ref"): - get_console().print( - f"[red]Error: Ref {ref} is not exists in repo apache/airflow .[/red]", - ) - sys.exit(1) + if not tag_respo.get("ref"): + get_console().print( + f"[red]Error: Ref {ref} does not exist in repo apache/airflow.[/red]", + ) + sys.exit(1) get_console().print( f"[blue]Triggering workflow {WORKFLOW_NAME_MAPS['publish-docs']}: at {APACHE_AIRFLOW_REPO}[/blue]", diff --git a/dev/breeze/src/airflow_breeze/commands/workflow_commands_config.py b/dev/breeze/src/airflow_breeze/commands/workflow_commands_config.py index 6a5025dcf9bfb..3ffb4e92f117d 100644 --- a/dev/breeze/src/airflow_breeze/commands/workflow_commands_config.py +++ 
b/dev/breeze/src/airflow_breeze/commands/workflow_commands_config.py @@ -27,6 +27,7 @@ "name": "Trigger publish docs workflow", "options": [ "--ref", + "--skip-tag-validation", "--exclude-docs", "--site-env", "--skip-write-to-stable-folder", diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index 74c8d1bda3fd7..a81d7cae2e2f8 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -33,6 +33,7 @@ AIRFLOW_CORE_SOURCES_PATH, AIRFLOW_PYPROJECT_TOML_FILE_PATH, AIRFLOW_ROOT_PATH, + AIRFLOW_TASK_SDK_SOURCES_PATH, ) PUBLIC_AMD_RUNNERS = '["ubuntu-22.04"]' @@ -62,6 +63,7 @@ "celery", "cassandra", "drill", + "gremlin", "kafka", "mongo", "mssql", @@ -192,7 +194,7 @@ ALLOWED_INSTALL_MYSQL_CLIENT_TYPES = ["mariadb", "mysql"] PIP_VERSION = "25.1.1" -UV_VERSION = "0.7.8" +UV_VERSION = "0.7.14" DEFAULT_UV_HTTP_TIMEOUT = 300 DEFAULT_WSL2_HTTP_TIMEOUT = 900 @@ -203,6 +205,12 @@ "docker-stack", "helm-chart", "apache-airflow-providers", + "task-sdk", +] + +DESTINATION_LOCATIONS = [ + "s3://live-docs-airflow-apache-org/docs/", + "s3://staging-docs-airflow-apache-org/docs/", ] @@ -358,6 +366,7 @@ def get_default_platform_machine() -> str: DRILL_HOST_PORT = "28047" FLOWER_HOST_PORT = "25555" +GREMLIN_HOST_PORT = "8182" MSSQL_HOST_PORT = "21433" MYSQL_HOST_PORT = "23306" POSTGRES_HOST_PORT = "25433" @@ -546,6 +555,19 @@ def get_airflow_version(): return airflow_version +def get_task_sdk_version(): + task_sdk_init_py_file = AIRFLOW_TASK_SDK_SOURCES_PATH / "airflow" / "sdk" / "__init__.py" + task_sdk_version = "unknown" + with open(task_sdk_init_py_file) as init_file: + while line := init_file.readline(): + if "__version__ = " in line: + task_sdk_version = line.split()[2][1:-1] + break + if task_sdk_version == "unknown": + raise RuntimeError("Unable to determine Task SDK version") + return task_sdk_version + + @clearable_cache def get_airflow_extras(): 
airflow_dockerfile = AIRFLOW_ROOT_PATH / "Dockerfile" @@ -708,14 +730,20 @@ def generate_provider_dependencies_if_needed(): PROVIDERS_COMPATIBILITY_TESTS_MATRIX: list[dict[str, str | list[str]]] = [ { "python-version": "3.9", - "airflow-version": "2.9.3", - "remove-providers": "cloudant common.messaging fab edge3 git", + "airflow-version": "2.10.5", + "remove-providers": "cloudant common.messaging fab git keycloak", "run-tests": "true", }, { "python-version": "3.9", "airflow-version": "2.11.0", - "remove-providers": "cloudant common.messaging fab git", + "remove-providers": "cloudant common.messaging fab git keycloak", + "run-tests": "true", + }, + { + "python-version": "3.9", + "airflow-version": "3.0.2", + "remove-providers": "cloudant", "run-tests": "true", }, ] diff --git a/dev/breeze/src/airflow_breeze/params/build_ci_params.py b/dev/breeze/src/airflow_breeze/params/build_ci_params.py index 8f509cb2c95e9..65adcf6efde9d 100644 --- a/dev/breeze/src/airflow_breeze/params/build_ci_params.py +++ b/dev/breeze/src/airflow_breeze/params/build_ci_params.py @@ -73,7 +73,6 @@ def prepare_arguments_for_docker_build_command(self) -> list[str]: # optional build args self._set_common_opt_args() self._opt_arg("INSTALL_MYSQL_CLIENT_TYPE", self.install_mysql_client_type) - self._opt_arg("VERSION_SUFFIX", self.version_suffix) # Convert to build args build_args = self._to_build_args() # Add cache directive diff --git a/dev/breeze/src/airflow_breeze/params/doc_build_params.py b/dev/breeze/src/airflow_breeze/params/doc_build_params.py index 2d4985fae2af6..eb0100a335912 100644 --- a/dev/breeze/src/airflow_breeze/params/doc_build_params.py +++ b/dev/breeze/src/airflow_breeze/params/doc_build_params.py @@ -29,6 +29,7 @@ class DocBuildParams: spellcheck_only: bool short_doc_packages: tuple[str, ...] 
one_pass_only: bool = False + include_commits: bool = False github_actions = os.environ.get("GITHUB_ACTIONS", "false") @property @@ -40,6 +41,8 @@ def args_doc_builder(self) -> list[str]: doc_args.append("--spellcheck-only") if self.one_pass_only: doc_args.append("--one-pass-only") + if self.include_commits: + doc_args.append("--include-commits") if self.package_filter: for filter in self.package_filter: doc_args.extend(["--package-filter", filter]) diff --git a/dev/breeze/src/airflow_breeze/params/shell_params.py b/dev/breeze/src/airflow_breeze/params/shell_params.py index 6a99a7f8cdac0..628001fd111e6 100644 --- a/dev/breeze/src/airflow_breeze/params/shell_params.py +++ b/dev/breeze/src/airflow_breeze/params/shell_params.py @@ -46,6 +46,7 @@ EDGE_EXECUTOR, FAB_AUTH_MANAGER, FLOWER_HOST_PORT, + GREMLIN_HOST_PORT, KEYCLOAK_INTEGRATION, MOUNT_ALL, MOUNT_PROVIDERS_AND_TESTS, @@ -139,7 +140,7 @@ class ShellParams: airflow_constraints_mode: str = ALLOWED_CONSTRAINTS_MODES_CI[0] airflow_constraints_reference: str = "" airflow_extras: str = "" - airflow_skip_constraints: bool = False + allow_pre_releases: bool = False auth_manager: str = ALLOWED_AUTH_MANAGERS[0] backend: str = ALLOWED_BACKENDS[0] base_branch: str = "main" @@ -345,13 +346,23 @@ def compose_file(self) -> str: self.airflow_extras = ( ",".join(current_extras.split(",") + ["celery"]) if current_extras else "celery" ) + if self.auth_manager == FAB_AUTH_MANAGER: + if self.use_airflow_version: + current_extras = self.airflow_extras + if "fab" not in current_extras.split(","): + get_console().print( + "[warning]Adding `fab` extras as it is implicitly needed by FAB auth manager" + ) + self.airflow_extras = ( + ",".join(current_extras.split(",") + ["fab"]) if current_extras else "fab" + ) compose_file_list.append(DOCKER_COMPOSE_DIR / "base.yml") self.add_docker_in_docker(compose_file_list) compose_file_list.extend(backend_files) compose_file_list.append(DOCKER_COMPOSE_DIR / "files.yml") - if os.environ.get("CI", 
"false") == "true" and self.use_uv: - compose_file_list.append(DOCKER_COMPOSE_DIR / "ci-uv-tests.yml") + if os.environ.get("CI", "false") == "true": + compose_file_list.append(DOCKER_COMPOSE_DIR / "ci-tests.yml") if self.use_airflow_version is not None and self.mount_sources not in USE_AIRFLOW_MOUNT_SOURCES: get_console().print( @@ -513,7 +524,6 @@ def env_variables_for_docker_commands(self) -> dict[str, str]: _set_var(_env, "AIRFLOW_CONSTRAINTS_REFERENCE", self.airflow_constraints_reference) _set_var(_env, "AIRFLOW_ENV", "development") _set_var(_env, "AIRFLOW_EXTRAS", self.airflow_extras) - _set_var(_env, "AIRFLOW_SKIP_CONSTRAINTS", self.airflow_skip_constraints) _set_var(_env, "AIRFLOW_IMAGE_KUBERNETES", self.airflow_image_kubernetes) _set_var(_env, "AIRFLOW_VERSION", self.airflow_version) _set_var(_env, "AIRFLOW__API_AUTH__JWT_SECRET", b64encode(os.urandom(16)).decode("utf-8")) @@ -552,6 +562,7 @@ def env_variables_for_docker_commands(self) -> dict[str, str]: port = 8080 _set_var(_env, "AIRFLOW__EDGE__API_URL", f"http://localhost:{port}/edge_worker/v1/rpcapi") _set_var(_env, "ANSWER", get_forced_answer() or "") + _set_var(_env, "ALLOW_PRE_RELEASES", self.allow_pre_releases) _set_var(_env, "BACKEND", self.backend) _set_var(_env, "BASE_BRANCH", self.base_branch, "main") _set_var(_env, "BREEZE", "true") @@ -578,10 +589,12 @@ def env_variables_for_docker_commands(self) -> dict[str, str]: _set_var(_env, "DRILL_HOST_PORT", None, DRILL_HOST_PORT) _set_var(_env, "ENABLE_COVERAGE", self.enable_coverage) _set_var(_env, "FLOWER_HOST_PORT", None, FLOWER_HOST_PORT) + _set_var(_env, "GREMLIN_HOST_PORT", None, GREMLIN_HOST_PORT) _set_var(_env, "EXCLUDED_PROVIDERS", self.excluded_providers) _set_var(_env, "FORCE_LOWEST_DEPENDENCIES", self.force_lowest_dependencies) _set_var(_env, "SQLALCHEMY_WARN_20", self.force_sa_warnings) _set_var(_env, "GITHUB_ACTIONS", self.github_actions) + _set_var(_env, "GITHUB_TOKEN", self.github_token) _set_var(_env, "HOST_GROUP_ID", 
self.host_group_id) _set_var(_env, "HOST_OS", self.host_os) _set_var(_env, "HOST_USER_ID", self.host_user_id) diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py index 7095d55b43fbd..f2ce310cc5046 100644 --- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py +++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py @@ -54,6 +54,7 @@ "check-for-inclusive-language", "check-get-lineage-collector-providers", "check-hooks-apply", + "check-i18n-json", "check-imports-in-providers", "check-incorrect-use-of-LoggingMixin", "check-init-decorator-arguments", @@ -73,6 +74,7 @@ "check-provider-yaml-valid", "check-providers-subpackages-init-file-exist", "check-pydevd-left-in-code", + "check-pytest-mark-db-test-in-providers", "check-revision-heads-map", "check-safe-filter-usage-in-html", "check-significant-newsfragments-are-valid", @@ -99,12 +101,18 @@ "end-of-file-fixer", "fix-encoding-pragma", "flynt", + "gci", "generate-airflow-diagrams", + "generate-airflowctl-datamodels", + "generate-airflowctl-help-images", "generate-openapi-spec", "generate-openapi-spec-fab", "generate-pypi-readme", "generate-tasksdk-datamodels", "generate-volumes-for-sources", + "go-mockery", + "go-mod-tidy", + "gofmt", "identity", "insert-license", "kubeconform", @@ -121,6 +129,7 @@ "mypy-providers", "mypy-task-sdk", "pretty-format-json", + "prevent-deprecated-sqlalchemy-usage", "pylint", "python-no-log-warn", "replace-bad-characters", diff --git a/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py b/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py index a8ade2388ecc1..1eded07332772 100644 --- a/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py +++ b/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py @@ -110,15 +110,19 @@ class TypeOfChange(Enum): BREAKING_CHANGE = "x" SKIP = "s" MISC = "m" + MIN_AIRFLOW_VERSION_BUMP = "v" # defines the precedence order for 
provider version bumps -# BREAKING_CHANGE > FEATURE > BUGFIX > MISC > DOCUMENTATION > SKIP +# BREAKING_CHANGE > FEATURE > MIN_AIRFLOW_VERSION_BUMP > BUGFIX > MISC > DOCUMENTATION > SKIP + +# When MIN_AIRFLOW_VERSION_BUMP is provided, it means that the bump is at least feature precedence_order = { TypeOfChange.SKIP: 0, TypeOfChange.DOCUMENTATION: 1, TypeOfChange.MISC: 2, TypeOfChange.BUGFIX: 3, + TypeOfChange.MIN_AIRFLOW_VERSION_BUMP: 3.5, TypeOfChange.FEATURE: 4, TypeOfChange.BREAKING_CHANGE: 5, } @@ -187,6 +191,7 @@ class PrepareReleaseDocsUserQuitException(Exception): TypeOfChange.FEATURE: "Feature changes - bump in MINOR version needed", TypeOfChange.BREAKING_CHANGE: "Breaking changes - bump in MAJOR version needed", TypeOfChange.MISC: "Miscellaneous changes - bump in PATCHLEVEL version needed", + TypeOfChange.MIN_AIRFLOW_VERSION_BUMP: "Airflow version bump change - bump in MINOR version needed", } @@ -322,9 +327,7 @@ def _get_all_changes_for_package( ) providers_folder_paths_for_git_commit_retrieval = [ provider_details.root_provider_path, - provider_details.previous_source_provider_distribution_path, - provider_details.previous_documentation_provider_distribution_path, - provider_details.original_source_provider_distribution_path, + *provider_details.possible_old_provider_paths, ] if not reapply_templates_only and result.returncode == 0: if get_verbose(): @@ -451,7 +454,7 @@ def _ask_the_user_for_the_type_of_changes(non_interactive: bool) -> TypeOfChange while True: get_console().print( "[warning]Type of change (d)ocumentation, (b)ugfix, (f)eature, (x)breaking " - f"change, (m)misc, (s)kip, (q)uit [{display_answers}]?[/] ", + f"change, (m)isc, (s)kip, airflow_min_(v)ersion_bump, (q)uit [{display_answers}]?[/] ", end="", ) try: @@ -506,13 +509,13 @@ def bump_version(v: Version, index: int) -> Version: def _update_version_in_provider_yaml( - provider_id: str, - type_of_change: TypeOfChange, + provider_id: str, type_of_change: TypeOfChange, 
min_airflow_version_bump: bool = False ) -> tuple[bool, bool, str]: """ Updates provider version based on the type of change selected by the user :param type_of_change: type of change selected :param provider_id: provider package + :param min_airflow_version_bump: if set, ensure that the version bump is at least feature version. :return: tuple of two bools: (with_breaking_change, maybe_with_new_features, original_text) """ provider_details = get_provider_details(provider_id) @@ -533,6 +536,8 @@ def _update_version_in_provider_yaml( v = bump_version(v, VERSION_PATCHLEVEL_INDEX) elif type_of_change == TypeOfChange.MISC: v = bump_version(v, VERSION_PATCHLEVEL_INDEX) + if min_airflow_version_bump: + v = bump_version(v, VERSION_MINOR_INDEX) provider_yaml_path = get_provider_yaml(provider_id) original_provider_yaml_content = provider_yaml_path.read_text() updated_provider_yaml_content = re.sub( @@ -668,7 +673,6 @@ def _update_file( f"[info]Please fix them by replacing with double backticks (``).[/]\n" ) raise PrepareReleaseDocsErrorOccurredException() - get_console().print(f"Linting: {target_file_path}") import restructuredtext_lint @@ -682,6 +686,8 @@ def _update_file( or 'Unknown interpreted text role "doc"' in error.message ): continue + if "airflow-providers-commits" in error.message: + continue real_errors = True get_console().print(f"* [red] {error.message}") if real_errors: @@ -709,6 +715,39 @@ def _update_commits_rst( ) +def _is_test_or_example_dag_only_changes(commit_hash: str) -> bool: + """ + Check if a commit contains only test-related or example DAG changes + by using the git diff command. + + Considers files in airflow/providers/{provider}/tests/ + and airflow/providers/{provider}/src/airflow/providers/{provider}/example_dags/ + as test/example-only files. 
+ + :param commit_hash: The full commit hash to check + :return: True if changes are only in test/example files, False otherwise + """ + try: + result = run_command( + ["git", "diff", "--name-only", f"{commit_hash}^", commit_hash], + cwd=AIRFLOW_ROOT_PATH, + capture_output=True, + text=True, + check=True, + ) + changed_files = result.stdout.strip().splitlines() + + for file_path in changed_files: + if not ( + re.match(r"providers/[^/]+/tests/", file_path) + or re.match(r"providers/[^/]+/src/airflow/providers/[^/]+/example_dags/", file_path) + ): + return False + return True + except subprocess.CalledProcessError: + return False + + def update_release_notes( provider_id: str, reapply_templates_only: bool, @@ -716,7 +755,7 @@ def update_release_notes( regenerate_missing_docs: bool, non_interactive: bool, only_min_version_update: bool, -) -> tuple[bool, bool]: +) -> tuple[bool, bool, bool]: """Updates generated files. This includes the readme, changes, and provider.yaml files. @@ -726,7 +765,8 @@ def update_release_notes( :param base_branch: base branch to check changes in apache remote for changes :param regenerate_missing_docs: whether to regenerate missing docs :param non_interactive: run in non-interactive mode (useful for CI) - :return: tuple of two bools: (with_breaking_change, maybe_with_new_features) + :param only_min_version_update: whether to only update min version + :return: tuple of three bools: (with_breaking_change, maybe_with_new_features, with_min_airflow_version_bump) """ proceed, list_of_list_of_changes, changes_as_table = _get_all_changes_for_package( provider_id=provider_id, @@ -738,6 +778,7 @@ def update_release_notes( maybe_with_new_features = False original_provider_yaml_content: str | None = None marked_for_release = False + with_min_airflow_version_bump = False if not reapply_templates_only: if proceed: if non_interactive: @@ -764,7 +805,7 @@ def update_release_notes( answer = user_confirm(f"Does the provider: {provider_id} have any changes 
apart from 'doc-only'?") if answer == Answer.NO: _mark_latest_changes_as_documentation_only(provider_id, list_of_list_of_changes) - return with_breaking_changes, maybe_with_new_features + return with_breaking_changes, maybe_with_new_features, False change_table_len = len(list_of_list_of_changes[0]) table_iter = 0 global SHORT_HASH_TO_TYPE_DICT @@ -774,12 +815,25 @@ def update_release_notes( formatted_message = format_message_for_classification( list_of_list_of_changes[0][table_iter].message_without_backticks ) - get_console().print( - f"[green]Define the type of change for " - f"`{formatted_message}`" - f" by referring to the above table[/]" - ) - type_of_change = _ask_the_user_for_the_type_of_changes(non_interactive=non_interactive) + change = list_of_list_of_changes[0][table_iter] + + if change.pr and _is_test_or_example_dag_only_changes(change.full_hash): + get_console().print( + f"[green]Automatically classifying change as SKIPPED since it only contains test changes:[/]\n" + f"[blue]{formatted_message}[/]" + ) + type_of_change = TypeOfChange.SKIP + else: + get_console().print( + f"[green]Define the type of change for " + f"`{formatted_message}`" + f" by referring to the above table[/]" + ) + type_of_change = _ask_the_user_for_the_type_of_changes(non_interactive=non_interactive) + + if type_of_change == TypeOfChange.MIN_AIRFLOW_VERSION_BUMP: + with_min_airflow_version_bump = True + change_hash = list_of_list_of_changes[0][table_iter].short_hash SHORT_HASH_TO_TYPE_DICT[change_hash] = type_of_change type_of_current_package_changes.append(type_of_change) @@ -797,6 +851,10 @@ def update_release_notes( f"[special]{TYPE_OF_CHANGE_DESCRIPTION[type_of_change]}" ) get_console().print() + bump = False + if type_of_change == TypeOfChange.MIN_AIRFLOW_VERSION_BUMP: + bump = True + type_of_change = TypeOfChange.MISC if type_of_change in [ TypeOfChange.BUGFIX, TypeOfChange.FEATURE, @@ -804,7 +862,9 @@ def update_release_notes( TypeOfChange.MISC, ]: with_breaking_changes, 
maybe_with_new_features, original_provider_yaml_content = ( - _update_version_in_provider_yaml(provider_id=provider_id, type_of_change=type_of_change) + _update_version_in_provider_yaml( + provider_id=provider_id, type_of_change=type_of_change, min_airflow_version_bump=bump + ) ) if not reapply_templates_only: _update_source_date_epoch_in_provider_yaml(provider_id) @@ -851,9 +911,14 @@ def update_release_notes( TypeOfChange.BREAKING_CHANGE, TypeOfChange.MISC, ]: + bump = False + if type_of_change == TypeOfChange.MIN_AIRFLOW_VERSION_BUMP: + bump = True + type_of_change = TypeOfChange.MISC with_breaking_changes, maybe_with_new_features, _ = _update_version_in_provider_yaml( provider_id=provider_id, type_of_change=type_of_change, + min_airflow_version_bump=bump, ) if not reapply_templates_only: _update_source_date_epoch_in_provider_yaml(provider_id) @@ -882,7 +947,7 @@ def update_release_notes( provider_details.documentation_provider_distribution_path, regenerate_missing_docs, ) - return with_breaking_changes, maybe_with_new_features + return with_breaking_changes, maybe_with_new_features, with_min_airflow_version_bump def _find_insertion_index_for_version(content: list[str], version: str) -> tuple[int, bool]: @@ -933,7 +998,7 @@ def _get_changes_classified( if type_of_change == TypeOfChange.BUGFIX: classified_changes.fixes.append(change) - elif type_of_change == TypeOfChange.MISC: + elif type_of_change == TypeOfChange.MISC or type_of_change == TypeOfChange.MIN_AIRFLOW_VERSION_BUMP: classified_changes.misc.append(change) elif type_of_change == TypeOfChange.FEATURE and maybe_with_new_features: classified_changes.features.append(change) @@ -953,6 +1018,7 @@ def _generate_new_changelog( context: dict[str, Any], with_breaking_changes: bool, maybe_with_new_features: bool, + with_min_airflow_version_bump: bool = False, ): latest_version = provider_details.versions[0] current_changelog = provider_details.changelog_path.read_text() @@ -995,6 +1061,7 @@ def 
_generate_new_changelog( "version": latest_version, "version_header": "." * len(latest_version), "classified_changes": classified_changes, + "min_airflow_version_bump": with_min_airflow_version_bump, } ) generated_new_changelog = render_template( @@ -1065,6 +1132,7 @@ def update_changelog( with_breaking_changes: bool, maybe_with_new_features: bool, only_min_version_update: bool, + with_min_airflow_version_bump: bool, ): """Internal update changelog method. @@ -1074,6 +1142,7 @@ def update_changelog( :param with_breaking_changes: whether there are any breaking changes :param maybe_with_new_features: whether there are any new features :param only_min_version_update: whether to only update the min version + :param with_min_airflow_version_bump: whether there is a min airflow version bump anywhere """ provider_details = get_provider_details(package_id) jinja_context = get_provider_documentation_jinja_context( @@ -1103,6 +1172,7 @@ def update_changelog( context=jinja_context, with_breaking_changes=with_breaking_changes, maybe_with_new_features=maybe_with_new_features, + with_min_airflow_version_bump=with_min_airflow_version_bump, ) get_console().print(f"\n[info]Update index.rst for {package_id}\n") _update_index_rst(jinja_context, package_id, provider_details.documentation_provider_distribution_path) diff --git a/dev/breeze/src/airflow_breeze/provider_issue_TEMPLATE.md.jinja2 b/dev/breeze/src/airflow_breeze/provider_issue_TEMPLATE.md.jinja2 index ba859655e210a..e4c035d4a05d1 100644 --- a/dev/breeze/src/airflow_breeze/provider_issue_TEMPLATE.md.jinja2 +++ b/dev/breeze/src/airflow_breeze/provider_issue_TEMPLATE.md.jinja2 @@ -16,7 +16,7 @@ These are providers that require testing as there were some substantial changes {%- if pr.number in linked_issues %} Linked issues: {%- for linked_issue in linked_issues[pr.number] %} - - [ ] [Linked Issue #{{ linked_issue.number }}]({{ linked_issue.html_url }}): @{{ linked_issue.user.login }} + - [Linked Issue #{{ linked_issue.number 
}}]({{ linked_issue.html_url }}): @{{ linked_issue.user.login }} {%- endfor %} {%- endif %} {%- endfor %} diff --git a/dev/breeze/src/airflow_breeze/templates/CHANGELOG_TEMPLATE.rst.jinja2 b/dev/breeze/src/airflow_breeze/templates/CHANGELOG_TEMPLATE.rst.jinja2 index 53e70714068f4..a9b55b2312b58 100644 --- a/dev/breeze/src/airflow_breeze/templates/CHANGELOG_TEMPLATE.rst.jinja2 +++ b/dev/breeze/src/airflow_breeze/templates/CHANGELOG_TEMPLATE.rst.jinja2 @@ -20,6 +20,16 @@ {{ version }} {{ version_header }} + +{%- if min_airflow_version_bump %} + +.. note:: + This release of provider is only available for Airflow X.Y+ as explained in the + Apache Airflow providers support policy _. + +{%- endif %} + + {%- if WITH_BREAKING_CHANGES and classified_changes.breaking_changes %} Breaking changes diff --git a/dev/breeze/src/airflow_breeze/templates/PROVIDER_COMMITS_TEMPLATE.rst.jinja2 b/dev/breeze/src/airflow_breeze/templates/PROVIDER_COMMITS_TEMPLATE.rst.jinja2 index 12beac1846bc4..8d1ead8c260dc 100644 --- a/dev/breeze/src/airflow_breeze/templates/PROVIDER_COMMITS_TEMPLATE.rst.jinja2 +++ b/dev/breeze/src/airflow_breeze/templates/PROVIDER_COMMITS_TEMPLATE.rst.jinja2 @@ -49,7 +49,4 @@ Package {{ PACKAGE_PIP_NAME }} This is detailed commit list of changes for versions provider package: ``{{PROVIDER_ID}}``. For high-level changelog, see :doc:`package information including changelog `. -{%- if DETAILED_CHANGES_PRESENT %} - -{{ DETAILED_CHANGES_RST | safe }} -{%- endif %} +.. 
airflow-providers-commits:: diff --git a/dev/breeze/src/airflow_breeze/utils/add_back_references.py b/dev/breeze/src/airflow_breeze/utils/add_back_references.py index cbc0369c7ce04..6a9896c5da541 100644 --- a/dev/breeze/src/airflow_breeze/utils/add_back_references.py +++ b/dev/breeze/src/airflow_breeze/utils/add_back_references.py @@ -85,7 +85,7 @@ def create_back_reference_html(back_ref_url: str, target_path: Path): version_match = re.compile(r"[0-9]+.[0-9]+.[0-9]+") target_path_as_posix = target_path.as_posix() if "/stable/" in target_path_as_posix: - prefix, postfix = target_path_as_posix.split("/stable/") + prefix, postfix = target_path_as_posix.split("/stable/", maxsplit=1) base_folder = Path(prefix) for folder in base_folder.iterdir(): if folder.is_dir() and version_match.match(folder.name): @@ -96,7 +96,7 @@ def create_back_reference_html(back_ref_url: str, target_path: Path): def generate_back_references(link: str, base_path: Path): if not base_path.exists(): - get_console().print("[blue]The provider is not yet released.Skipping.") + get_console().print(f"[blue]The folder {base_path} does not exist. 
Skipping.") return is_downloaded, file_name = download_file(link) if not is_downloaded: @@ -130,7 +130,10 @@ def generate_back_references(link: str, base_path: Path): create_back_reference_html(relative_path, dest_file_path) -def start_generating_back_references(airflow_site_directory: Path, short_provider_ids: list[str]): +def start_generating_back_references( + airflow_site_directory: Path, + short_provider_ids: list[str], +): docs_archive_path = airflow_site_directory / "docs-archive" airflow_docs_path = docs_archive_path / "apache-airflow" helm_docs_path = docs_archive_path / "helm-chart" diff --git a/dev/breeze/src/airflow_breeze/utils/docs_publisher.py b/dev/breeze/src/airflow_breeze/utils/docs_publisher.py index 09cb20e7ea46a..6028877db166e 100644 --- a/dev/breeze/src/airflow_breeze/utils/docs_publisher.py +++ b/dev/breeze/src/airflow_breeze/utils/docs_publisher.py @@ -18,18 +18,17 @@ import os import shutil -from pathlib import Path -from airflow_breeze.global_constants import get_airflow_version +from airflow_breeze.global_constants import get_airflow_version, get_task_sdk_version from airflow_breeze.utils.console import Output, get_console from airflow_breeze.utils.helm_chart_utils import chart_version from airflow_breeze.utils.packages import get_provider_distributions_metadata, get_short_package_name +from airflow_breeze.utils.path_utils import AIRFLOW_ROOT_PATH from airflow_breeze.utils.publish_docs_helpers import pretty_format_path PROCESS_TIMEOUT = 15 * 60 -ROOT_PROJECT_DIR = Path(__file__).parents[5].resolve() -GENERATED_PATH = ROOT_PROJECT_DIR / "generated" +GENERATED_PATH = AIRFLOW_ROOT_PATH / "generated" class DocsPublisher: @@ -67,6 +66,8 @@ def _current_version(self): if self.package_name.startswith("apache-airflow-providers-"): provider = get_provider_distributions_metadata().get(get_short_package_name(self.package_name)) return provider["versions"][0] + if self.package_name == "task-sdk": + return get_task_sdk_version() if self.package_name 
== "helm-chart": return chart_version() raise SystemExit(f"Unsupported package: {self.package_name}") diff --git a/dev/breeze/src/airflow_breeze/utils/gh_workflow_utils.py b/dev/breeze/src/airflow_breeze/utils/gh_workflow_utils.py index 30659b363b088..1b1e3dabab8a4 100644 --- a/dev/breeze/src/airflow_breeze/utils/gh_workflow_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/gh_workflow_utils.py @@ -19,6 +19,7 @@ import json import sys import time +from shutil import which from airflow_breeze.utils.console import get_console from airflow_breeze.utils.run_utils import run_command @@ -54,6 +55,16 @@ def tigger_workflow(workflow_name: str, repo: str, branch: str = "main", **kwarg time.sleep(5) +def make_sure_gh_is_installed(): + if not which("gh"): + get_console().print( + "[red]Error! The `gh` tool is not installed.[/]\n\n" + "[yellow]You need to install `gh` tool (see https://github.com/cli/cli) and " + "run `gh auth login` to connect your repo to GitHub." + ) + sys.exit(1) + + def get_workflow_run_id(workflow_name: str, repo: str) -> int: """ Get the latest workflow run ID for a given workflow name and repository. @@ -61,6 +72,7 @@ def get_workflow_run_id(workflow_name: str, repo: str) -> int: :param workflow_name: The name of the workflow to check. :param repo: The repository in the format 'owner/repo'. 
""" + make_sure_gh_is_installed() command = [ "gh", "run", @@ -85,7 +97,13 @@ def get_workflow_run_id(workflow_name: str, repo: str) -> int: get_console().print("[red]No workflow runs found.[/red]") sys.exit(1) - return json.loads(runs_data)[0]["databaseId"] + run_id = json.loads(runs_data)[0].get("databaseId") + + get_console().print( + f"[blue]Running workflow {workflow_name} at https://github.com/{repo}/actions/runs/{run_id}[/blue]", + ) + + return run_id def get_workflow_run_info(run_id: str, repo: str, fields: str) -> dict: @@ -96,6 +114,7 @@ def get_workflow_run_info(run_id: str, repo: str, fields: str) -> dict: :param repo: Workflow repository example: 'apache/airflow' :param fields: Comma-separated fields to retrieve from the workflow run to fetch. eg: "status,conclusion,name,jobs" """ + make_sure_gh_is_installed() command = ["gh", "run", "view", run_id, "--json", fields, "--repo", repo] result = run_command(command, capture_output=True, check=False) diff --git a/dev/breeze/src/airflow_breeze/utils/github.py b/dev/breeze/src/airflow_breeze/utils/github.py index fdd4b8aea344f..d1a9c2621045e 100644 --- a/dev/breeze/src/airflow_breeze/utils/github.py +++ b/dev/breeze/src/airflow_breeze/utils/github.py @@ -23,7 +23,7 @@ import zipfile from datetime import datetime, timezone from pathlib import Path -from typing import Any +from typing import TYPE_CHECKING, Any from rich.markup import escape @@ -32,6 +32,9 @@ from airflow_breeze.utils.path_utils import AIRFLOW_ROOT_PATH from airflow_breeze.utils.shared_options import get_dry_run +if TYPE_CHECKING: + from requests import Response + def get_ga_output(name: str, value: Any) -> str: output_name = name.replace("_", "-") @@ -40,6 +43,45 @@ def get_ga_output(name: str, value: Any) -> str: return f"{output_name}={printed_value}" +def log_github_rate_limit_error(response: Response) -> None: + """ + Logs info about GitHub rate limit errors (primary or secondary). 
+ """ + if response.status_code not in (403, 429): + return + + remaining = response.headers.get("x-rateLimit-remaining") + reset = response.headers.get("x-rateLimit-reset") + retry_after = response.headers.get("retry-after") + + try: + message = response.json().get("message", "") + except Exception: + message = response.text or "" + + remaining_int = int(remaining) if remaining and remaining.isdigit() else None + + if reset and reset.isdigit(): + reset_dt = datetime.fromtimestamp(int(reset), tz=timezone.utc) + reset_time = reset_dt.strftime("%Y-%m-%d %H:%M:%S UTC") + else: + reset_time = "unknown" + + if remaining_int == 0: + print(f"Primary rate limit exceeded. No requests remaining. Reset at {reset_time}.") + return + + # Message for secondary looks like: "You have exceeded a secondary rate limit" + if "secondary rate limit" in message.lower(): + if retry_after and retry_after.isdigit(): + print(f"Secondary rate limit exceeded. Retry after {retry_after} seconds.") + else: + print(f"Secondary rate limit exceeded. Please wait until {reset_time} or at least 60 seconds.") + return + + print(f"Rate limit error. Status: {response.status_code}, Message: {message}") + + def download_file_from_github( tag: str, path: str, output_file: Path, github_token: str | None = None, timeout: int = 60 ) -> bool: @@ -64,6 +106,7 @@ def download_file_from_github( headers["X-GitHub-Api-Version"] = "2022-11-28" try: response = requests.get(url, headers=headers, timeout=timeout) + log_github_rate_limit_error(response) if response.status_code == 403: get_console().print( f"[error]Access denied to {url}. 
This may be caused by:\n" diff --git a/dev/breeze/src/airflow_breeze/utils/packages.py b/dev/breeze/src/airflow_breeze/utils/packages.py index c2c30ffa24de9..08b63372f0d8c 100644 --- a/dev/breeze/src/airflow_breeze/utils/packages.py +++ b/dev/breeze/src/airflow_breeze/utils/packages.py @@ -56,7 +56,7 @@ from airflow_breeze.utils.version_utils import remove_local_version_suffix from airflow_breeze.utils.versions import get_version_tag, strip_leading_zeros_from_version -MIN_AIRFLOW_VERSION = "2.9.0" +MIN_AIRFLOW_VERSION = "2.10.0" HTTPS_REMOTE = "apache-https-for-providers" LONG_PROVIDERS_PREFIX = "apache-airflow-providers-" @@ -82,12 +82,10 @@ class ProviderPackageDetails(NamedTuple): source_date_epoch: int full_package_name: str pypi_package_name: str - original_source_provider_distribution_path: Path root_provider_path: Path base_provider_package_path: Path documentation_provider_distribution_path: Path - previous_documentation_provider_distribution_path: Path - previous_source_provider_distribution_path: Path + possible_old_provider_paths: list[Path] changelog_path: Path provider_description: str dependencies: list[str] @@ -431,21 +429,20 @@ def find_matching_long_package_names( ) -# !!!! We should not remove those old/original package paths as they are used to get changes -# When documentation is generated using git_log -def get_original_source_distribution_path(provider_id: str) -> Path: - return AIRFLOW_ORIGINAL_PROVIDERS_DIR.joinpath(*provider_id.split(".")) +def get_provider_root_path(provider_id: str) -> Path: + return AIRFLOW_PROVIDERS_ROOT_PATH / provider_id.replace(".", "/") -def get_previous_source_providers_distribution_path(provider_id: str) -> Path: - return PREVIOUS_AIRFLOW_PROVIDERS_NS_PACKAGE_PATH.joinpath(*provider_id.split(".")) - - -def get_previous_documentation_distribution_path(provider_id: str) -> Path: - return DOCS_ROOT / f"apache-airflow-providers-{provider_id.replace('.', '-')}" - - -# End of do not remove those package paths. 
+def get_possible_old_provider_paths(provider_id: str) -> list[Path]: + # This is used to get historical commits for the provider + paths: list[Path] = [] + paths.append(AIRFLOW_ORIGINAL_PROVIDERS_DIR.joinpath(*provider_id.split("."))) + paths.append(PREVIOUS_AIRFLOW_PROVIDERS_NS_PACKAGE_PATH.joinpath(*provider_id.split("."))) + paths.append(DOCS_ROOT / f"apache-airflow-providers-{provider_id.replace('.', '-')}") + if provider_id == "edge3": + paths.append(get_provider_root_path("edge")) + paths.append(get_provider_root_path("edgeexecutor")) + return paths def get_documentation_package_path(provider_id: str) -> Path: @@ -565,13 +562,7 @@ def get_provider_details(provider_id: str) -> ProviderPackageDetails: pypi_package_name=f"apache-airflow-providers-{provider_id.replace('.', '-')}", root_provider_path=root_provider_path, base_provider_package_path=base_provider_package_path, - original_source_provider_distribution_path=get_original_source_distribution_path(provider_id), - previous_documentation_provider_distribution_path=get_previous_documentation_distribution_path( - provider_id - ), - previous_source_provider_distribution_path=get_previous_source_providers_distribution_path( - provider_id - ), + possible_old_provider_paths=get_possible_old_provider_paths(provider_id), documentation_provider_distribution_path=documentation_provider_distribution_path, changelog_path=changelog_path, provider_description=provider_info["description"], diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py index 0edd3601a087f..a9cef7c0dc1fd 100644 --- a/dev/breeze/src/airflow_breeze/utils/path_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py @@ -289,6 +289,8 @@ def find_airflow_root_path_to_operate_on() -> Path: AIRFLOW_PYPROJECT_TOML_FILE_PATH = AIRFLOW_ROOT_PATH / "pyproject.toml" AIRFLOW_CORE_ROOT_PATH = AIRFLOW_ROOT_PATH / "airflow-core" AIRFLOW_CORE_SOURCES_PATH = AIRFLOW_CORE_ROOT_PATH / "src" 
+AIRFLOW_TASK_SDK_ROOT_PATH = AIRFLOW_ROOT_PATH / "task-sdk" +AIRFLOW_TASK_SDK_SOURCES_PATH = AIRFLOW_TASK_SDK_ROOT_PATH / "src" AIRFLOW_WWW_DIR = AIRFLOW_CORE_SOURCES_PATH / "airflow" / "www" AIRFLOW_UI_DIR = AIRFLOW_CORE_SOURCES_PATH / "airflow" / "ui" # Do not delete it - it is used for old commit retrieval from providers diff --git a/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py b/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py new file mode 100644 index 0000000000000..eb64174e8df08 --- /dev/null +++ b/dev/breeze/src/airflow_breeze/utils/publish_docs_to_s3.py @@ -0,0 +1,358 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations
+
+import json
+import os
+import subprocess
+import sys
+from functools import cached_property
+
+import awswrangler as wr
+import boto3
+
+from airflow_breeze.utils.console import get_console
+from airflow_breeze.utils.parallel import check_async_run_results, run_with_pool
+
+PROVIDER_NAME_FORMAT = "apache-airflow-providers-{}"
+
+NON_SHORT_NAME_PACKAGES = ["docker-stack", "helm-chart", "apache-airflow"]
+
+PACKAGES_METADATA_EXCLUDE_NAMES = ["docker-stack", "apache-airflow-providers"]
+
+s3_client = boto3.client("s3")
+cloudfront_client = boto3.client("cloudfront")
+
+version_error = False
+
+
+def get_cloudfront_distribution(destination_location):
+    if "live-docs" in destination_location:
+        return "E26P75MP9PMULE"
+    return "E197MS0XRJC5F3"
+
+
+class S3DocsPublish:
+    def __init__(
+        self,
+        source_dir_path: str,
+        destination_location: str,
+        exclude_docs: str,
+        dry_run: bool = False,
+        overwrite: bool = False,
+        parallelism: int = 1,
+        skip_write_to_stable_folder: bool = False,
+    ):
+        self.source_dir_path = source_dir_path
+        self.destination_location = destination_location
+        self.exclude_docs = exclude_docs
+        self.dry_run = dry_run
+        self.overwrite = overwrite
+        self.parallelism = parallelism
+        self.source_dest_mapping: list[tuple[str, str]] = []
+        self.skip_write_to_stable_folder = skip_write_to_stable_folder
+
+    @cached_property
+    def get_all_docs(self):
+        get_console().print(f"[info]Getting all docs from {self.source_dir_path}\n")
+        try:
+            all_docs = os.listdir(self.source_dir_path)
+        except FileNotFoundError:
+            get_console().print(f"[error]No docs found in {self.source_dir_path}\n")
+            sys.exit(1)
+        return all_docs
+
+    @cached_property
+    def get_all_excluded_docs(self):
+        if not self.exclude_docs:
+            return []
+        excluded_docs = self.exclude_docs.split(",")
+
+        # We remove `no-docs-excluded` string, this will be sent from github workflows input as default value.
+ if "no-docs-excluded" in excluded_docs: + excluded_docs.remove("no-docs-excluded") + return excluded_docs + + @cached_property + def get_all_eligible_docs(self): + """ + It excludes the docs that are in the exclude list + """ + non_eligible_docs = [] + + for excluded_doc in self.get_all_excluded_docs: + if excluded_doc in NON_SHORT_NAME_PACKAGES: + non_eligible_docs.append(excluded_doc) + continue + + for doc in self.get_all_docs: + excluded_provider_name = PROVIDER_NAME_FORMAT.format(excluded_doc.replace(".", "-")) + if doc == excluded_provider_name: + non_eligible_docs.append(doc) + continue + + docs_to_process = list(set(self.get_all_docs) - set(non_eligible_docs)) + if not docs_to_process: + get_console().print("[error]No eligible docs found, all docs are excluded\n") + sys.exit(1) + + return docs_to_process + + def doc_exists(self, s3_bucket_doc_location: str) -> bool: + bucket, key = self.get_bucket_key(s3_bucket_doc_location) + response = s3_client.list_objects_v2(Bucket=bucket, Prefix=key) + + return response["KeyCount"] > 0 + + def sync_docs_to_s3(self, source: str, destination: str): + if self.dry_run: + get_console().print(f"Dry run enabled, skipping sync operation {source} to {destination}") + return (0, "") + get_console().print(f"[info]Syncing {source} to {destination}\n") + result = subprocess.run( + ["aws", "s3", "sync", "--delete", source, destination], capture_output=True, text=True + ) + return (result.returncode, result.stderr) + + def publish_stable_version_docs(self): + """ + Publish stable version docs to S3. 
The source dir should have a stable.txt file and it + publishes to two locations: one with the version folder and another with stable folder + ex: + docs/apache-airflow-providers-apache-cassandra/1.0.0 + docs/apache-airflow-providers-apache-cassandra/stable + """ + + for doc in self.get_all_eligible_docs: + # PACKAGES_METADATA_EXCLUDE_NAMES has no stable versions so we copy them directly + if doc not in PACKAGES_METADATA_EXCLUDE_NAMES: + stable_file_path = f"{self.source_dir_path}/{doc}/stable.txt" + if os.path.exists(stable_file_path): + with open(stable_file_path) as stable_file: + stable_version = stable_file.read() + get_console().print(f"[info]Stable version: {stable_version} for {doc}\n") + else: + get_console().print( + f"[info]Skipping, stable version file not found for {doc} in {stable_file_path}\n" + ) + continue + + dest_doc_versioned_folder = f"{self.destination_location}/{doc}/{stable_version}/" + dest_doc_stable_folder = f"{self.destination_location}/{doc}/stable/" + + if self.doc_exists(dest_doc_versioned_folder): + if self.overwrite: + get_console().print( + f"[info]Overwriting existing version {stable_version} for {doc}\n" + ) + else: + get_console().print( + f"[info]Skipping doc publish for {doc} as version {stable_version} already exists\n" + ) + continue + + source_dir_doc_path = f"{self.source_dir_path}/{doc}/{stable_version}/" + + self.source_dest_mapping.append((source_dir_doc_path, dest_doc_versioned_folder)) + + if not self.skip_write_to_stable_folder: + self.source_dest_mapping.append((source_dir_doc_path, dest_doc_stable_folder)) + else: + source_dir_doc_path = f"{self.source_dir_path}/{doc}/" + dest_doc_versioned_folder = f"{self.destination_location}/{doc}/" + self.source_dest_mapping.append((source_dir_doc_path, dest_doc_versioned_folder)) + + if self.source_dest_mapping: + self.run_publish() + + def publish_all_docs(self): + for doc in self.get_all_eligible_docs: + dest_doc_folder = f"{self.destination_location}/{doc}/" + if 
self.doc_exists(dest_doc_folder): + if self.overwrite: + get_console().print(f"[info]Overwriting existing {dest_doc_folder}\n") + else: + get_console().print( + f"[info]Skipping doc publish for {dest_doc_folder} as already exists\n" + ) + continue + + source_dir_doc_path = f"{self.source_dir_path}/{doc}/" + self.source_dest_mapping.append((source_dir_doc_path, dest_doc_folder)) + + if self.source_dest_mapping: + self.run_publish() + + def run_publish(self): + all_params = [ + f"Publish docs from {source} to {destination}" for source, destination in self.source_dest_mapping + ] + + with run_with_pool( + parallelism=self.parallelism, + all_params=all_params, + ) as (pool, outputs): + results = [ + pool.apply_async( + self.sync_docs_to_s3, + kwds={ + "source": source, + "destination": destination, + }, + ) + for source, destination in self.source_dest_mapping + ] + + check_async_run_results( + results=results, + success="All docs published successfully", + outputs=outputs, + include_success_outputs=False, + ) + + # Now generate the packages-metadata.json + self.generate_packages_metadata() + + # Add redirects to package folders + [ + self.add_redirect(destination) + for _, destination in self.source_dest_mapping + if destination.endswith("stable/") + ] + + def generate_packages_metadata(self): + get_console().print("[info]Generating packages-metadata.json file\n") + + if self.dry_run: + get_console().print("Dry run enabled, skipping packages-metadata.json generation") + return + + package_versions_map = {} + s3_docs_path = self.destination_location.rstrip("/") + "/" + resp = wr.s3.list_directories(s3_docs_path) + + # package_path: s3://staging-docs-airflow-apache-org/docs/apache-airflow-providers-apache-cassandra/ + for package_path in resp: + package_name = package_path.replace(s3_docs_path, "").rstrip("/") + + if package_name in PACKAGES_METADATA_EXCLUDE_NAMES: + continue + + # version_path: 
s3://staging-docs-airflow-apache-org/docs/apache-airflow-providers-apache-cassandra/1.0.0/
+
+            versions = [
+                version_path.replace(package_path, "").rstrip("/")
+                for version_path in wr.s3.list_directories(package_path)
+                if version_path.replace(package_path, "").rstrip("/") != "stable"
+            ]
+            package_versions_map[package_name] = versions
+
+        all_packages_infos = self.dump_docs_package_metadata(package_versions_map)
+
+        bucket, _ = self.get_bucket_key(self.destination_location)
+
+        # We keep metadata in the same location with constant file name so that
+        # it's easy to reference in airflow-site with url
+        # ex: https://staging-docs-airflow-apache-org.s3.us-east-2.amazonaws.com/manifest/packages-metadata.json
+        get_console().print("[info]Uploading packages-metadata.json to S3\n")
+        s3_client.put_object(
+            Bucket=bucket,
+            Key="manifest/packages-metadata.json",
+            Body=json.dumps(all_packages_infos, indent=2),
+            ContentType="application/json",
+        )
+        get_console().print("[success]packages-metadata.json file generated successfully\n")
+        distribution_id = get_cloudfront_distribution(self.destination_location)
+        get_console().print(
+            f"[info]Invalidating CloudFront cache for the uploaded files: distribution id {distribution_id}\n"
+        )
+        # We invalidate all CloudFront caches so that all uploaded files are available immediately
+        cloudfront_client.create_invalidation(
+            DistributionId=distribution_id,
+            InvalidationBatch={
+                "Paths": {
+                    "Quantity": 1,
+                    "Items": ["/*"],
+                },
+                "CallerReference": str(int(os.environ.get("GITHUB_RUN_ID", 0))),
+            },
+        )
+        get_console().print(
+            f"[success]CloudFront cache request invalidated successfully: {distribution_id}\n"
+        )
+
+    def dump_docs_package_metadata(self, package_versions: dict[str, list[str]]):
+        all_packages_infos = [
+            {
+                "package-name": package_name,
+                "all-versions": (all_versions := self.get_all_versions(package_name, versions)),
+                "stable-version": all_versions[-1],
+            }
+            for package_name, versions in
package_versions.items() + ] + + return all_packages_infos + + @staticmethod + def get_all_versions(package_name: str, versions: list[str]) -> list[str]: + from packaging.version import Version + + good_versions = [] + for version in versions: + try: + Version(version) + good_versions.append(version) + except ValueError as e: + get_console().print(f"[error]Invalid version {version}: {e}\n") + global version_error + version_error = True + return sorted( + good_versions, + key=lambda d: Version(d), + ) + + @staticmethod + def get_bucket_key(bucket_path: str) -> tuple[str, str]: + parts = bucket_path[5:].split("/", 1) + bucket = parts[0] + key = parts[1] + return bucket, key + + def add_redirect(self, path: str): + """ + Add redirects for the docs to the S3 bucket + ex: The redirect will be placed in the docs/{package}/index.html + """ + bucket, key = self.get_bucket_key(path) + + redirect_path = f"/{key}index.html" + s3_key = key.replace("stable/", "") + "index.html" + + get_console().print(f"[info]Adding redirect {redirect_path} in {s3_key}\n") + + html_body = f""" + + + +""" + + s3_client.put_object( + Bucket=bucket, + Key=s3_key, + Body=html_body, + ContentType="text/html", + ) diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 17e1b08b482e7..32d369d8bfc20 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -100,6 +100,7 @@ class FileGroupForCi(Enum): ALWAYS_TESTS_FILES = "always_test_files" API_FILES = "api_files" GIT_PROVIDER_FILES = "git_provider_files" + STANDARD_PROVIDER_FILES = "standard_provider_files" API_CODEGEN_FILES = "api_codegen_files" HELM_FILES = "helm_files" DEPENDENCY_FILES = "dependency_files" @@ -108,12 +109,14 @@ class FileGroupForCi(Enum): SYSTEM_TEST_FILES = "system_tests" KUBERNETES_FILES = "kubernetes_files" TASK_SDK_FILES = "task_sdk_files" + GO_SDK_FILES = "go_sdk_files" 
AIRFLOW_CTL_FILES = "airflow_ctl_files" ALL_PYTHON_FILES = "all_python_files" ALL_SOURCE_FILES = "all_sources_for_tests" ALL_AIRFLOW_PYTHON_FILES = "all_airflow_python_files" ALL_AIRFLOW_CTL_PYTHON_FILES = "all_airflow_ctl_python_files" ALL_PROVIDERS_PYTHON_FILES = "all_provider_python_files" + ALL_PROVIDERS_DISTRIBUTION_CONFIG_FILES = "all_provider_distribution_config_files" ALL_DEV_PYTHON_FILES = "all_dev_python_files" ALL_DEVEL_COMMON_PYTHON_FILES = "all_devel_common_python_files" ALL_PROVIDER_YAML_FILES = "all_provider_yaml_files" @@ -170,6 +173,9 @@ def __hash__(self): FileGroupForCi.GIT_PROVIDER_FILES: [ r"^providers/git/src/", ], + FileGroupForCi.STANDARD_PROVIDER_FILES: [ + r"^providers/standard/src/", + ], FileGroupForCi.API_CODEGEN_FILES: [ r"^airflow-core/src/airflow/api_fastapi/core_api/openapi/.*generated\.yaml", r"^clients/gen", @@ -190,10 +196,12 @@ def __hash__(self): r"^airflow-core/src/.*\.py$", r"^airflow-core/docs/", r"^providers/.*/src/", + r"^providers/.*/tests/", r"^providers/.*/docs/", r"^providers-summary-docs", r"^docker-stack-docs", r"^chart", + r"^task-sdk/docs/", r"^task-sdk/src/", r"^airflow-ctl/src/", r"^airflow-core/tests/system", @@ -227,6 +235,10 @@ def __hash__(self): FileGroupForCi.ALL_PROVIDERS_PYTHON_FILES: [ r"^providers/.*\.py$", ], + FileGroupForCi.ALL_PROVIDERS_DISTRIBUTION_CONFIG_FILES: [ + r"^providers/.*/pyproject\.toml$", + r"^providers/.*/provider\.yaml$", + ], FileGroupForCi.ALL_DEV_PYTHON_FILES: [ r"^dev/.*\.py$", ], @@ -262,6 +274,9 @@ def __hash__(self): r"^task-sdk/src/airflow/sdk/.*\.py$", r"^task-sdk/tests/.*\.py$", ], + FileGroupForCi.GO_SDK_FILES: [ + r"^go-sdk/.*\.go$", + ], FileGroupForCi.ASSET_FILES: [ r"^airflow-core/src/airflow/assets/", r"^airflow-core/src/airflow/models/assets/", @@ -543,6 +558,16 @@ def full_tests_needed(self) -> bool: "and for now we have core tests depending on them.[/]" ) return True + if self._matching_files( + FileGroupForCi.STANDARD_PROVIDER_FILES, + CI_FILE_GROUP_MATCHES, + ): + 
# TODO(potiuk): remove me when we get rid of the dependency + get_console().print( + "[warning]Running full set of tests because standard provider files changed " + "and for now we have core tests depending on them.[/]" + ) + return True if self._matching_files( FileGroupForCi.TESTS_UTILS_FILES, CI_FILE_GROUP_MATCHES, @@ -691,6 +716,9 @@ def mypy_checks(self) -> list[str]: checks_to_run.append("mypy-airflow-core") if ( self._matching_files(FileGroupForCi.ALL_PROVIDERS_PYTHON_FILES, CI_FILE_GROUP_MATCHES) + or self._matching_files( + FileGroupForCi.ALL_PROVIDERS_DISTRIBUTION_CONFIG_FILES, CI_FILE_GROUP_MATCHES + ) or self._are_all_providers_affected() ) and self._default_branch == "main": checks_to_run.append("mypy-providers") @@ -746,7 +774,7 @@ def run_ui_tests(self) -> bool: @cached_property def run_amazon_tests(self) -> bool: - if self.providers_test_types_list_as_strings_in_json is None: + if self.providers_test_types_list_as_strings_in_json == "[]": return False return ( "amazon" in self.providers_test_types_list_as_strings_in_json @@ -757,6 +785,10 @@ def run_amazon_tests(self) -> bool: def run_task_sdk_tests(self) -> bool: return self._should_be_run(FileGroupForCi.TASK_SDK_FILES) + @cached_property + def run_go_sdk_tests(self) -> bool: + return self._should_be_run(FileGroupForCi.GO_SDK_FILES) + @cached_property def run_airflow_ctl_tests(self) -> bool: return self._should_be_run(FileGroupForCi.AIRFLOW_CTL_FILES) @@ -849,12 +881,16 @@ def _get_core_test_types_to_run(self) -> list[str]: all_providers_source_files = self._matching_files( FileGroupForCi.ALL_PROVIDERS_PYTHON_FILES, CI_FILE_GROUP_MATCHES ) + all_providers_distribution_config_files = self._matching_files( + FileGroupForCi.ALL_PROVIDERS_DISTRIBUTION_CONFIG_FILES, CI_FILE_GROUP_MATCHES + ) test_always_files = self._matching_files(FileGroupForCi.ALWAYS_TESTS_FILES, CI_FILE_GROUP_MATCHES) test_ui_files = self._matching_files(FileGroupForCi.UI_FILES, CI_FILE_GROUP_MATCHES) remaining_files = ( 
set(all_source_files) - set(all_providers_source_files) + - set(all_providers_distribution_config_files) - set(matched_files) - set(kubernetes_files) - set(system_test_files) @@ -897,10 +933,13 @@ def _get_providers_test_types_to_run(self, split_to_individual_providers: bool = all_providers_source_files = self._matching_files( FileGroupForCi.ALL_PROVIDERS_PYTHON_FILES, CI_FILE_GROUP_MATCHES ) + all_providers_distribution_config_files = self._matching_files( + FileGroupForCi.ALL_PROVIDERS_DISTRIBUTION_CONFIG_FILES, CI_FILE_GROUP_MATCHES + ) assets_source_files = self._matching_files(FileGroupForCi.ASSET_FILES, CI_FILE_GROUP_MATCHES) - if ( len(all_providers_source_files) == 0 + and len(all_providers_distribution_config_files) == 0 and len(assets_source_files) == 0 and not self.needs_api_tests ): @@ -969,9 +1008,9 @@ def core_test_types_list_as_strings_in_json(self) -> str | None: return json.dumps(_get_test_list_as_json([current_test_types])) @cached_property - def providers_test_types_list_as_strings_in_json(self) -> str | None: + def providers_test_types_list_as_strings_in_json(self) -> str: if not self.run_tests: - return None + return "[]" current_test_types = set(self._get_providers_test_types_to_run()) if self._default_branch != "main": test_types_to_remove: set[str] = set() @@ -1157,6 +1196,8 @@ def docs_list_as_string(self) -> str | None: packages.append("helm-chart") if any(file.startswith("docker-stack-docs") for file in self._files): packages.append("docker-stack") + if any(file.startswith("task-sdk/src/") for file in self._files): + packages.append("task-sdk") if providers_affected: for provider in providers_affected: packages.append(provider.replace("-", ".")) @@ -1192,6 +1233,9 @@ def skip_pre_commits(self) -> str: "check-airflow-provider-compatibility", "check-extra-packages-references", "check-provider-yaml-valid", + "check-imports-in-providers", + "check-deferrable-default", + "update-providers-build-files", "lint-helm-chart", 
"validate-operators-init", ) @@ -1215,7 +1259,9 @@ def skip_pre_commits(self) -> str: ): pre_commits_to_skip.add("lint-helm-chart") if not ( - self._matching_files(FileGroupForCi.ALL_PROVIDER_YAML_FILES, CI_FILE_GROUP_MATCHES) + self._matching_files( + FileGroupForCi.ALL_PROVIDERS_DISTRIBUTION_CONFIG_FILES, CI_FILE_GROUP_MATCHES + ) or self._matching_files(FileGroupForCi.ALL_PROVIDERS_PYTHON_FILES, CI_FILE_GROUP_MATCHES) ): # only skip provider validation if none of the provider.yaml and provider @@ -1419,6 +1465,7 @@ def _find_caplog_in_def(cls, added_lines): return True line_counter += 1 line_counter += 1 + return None def _caplog_exists_in_added_lines(self) -> bool: """ @@ -1427,7 +1474,7 @@ def _caplog_exists_in_added_lines(self) -> bool: :return: True if caplog is used in added lines else False """ lines = run_command( - ["git", "diff", f"{self._commit_ref}"], + ["git", "diff", f"{self._commit_ref}^"], capture_output=True, text=True, cwd=AIRFLOW_ROOT_PATH, @@ -1438,7 +1485,7 @@ def _caplog_exists_in_added_lines(self) -> bool: return False added_caplog_lines = [ - line.lstrip().lstrip("+ ") for line in lines.stdout.split("\n") if line.lstrip().startswith("+ ") + line.lstrip().lstrip("+") for line in lines.stdout.split("\n") if line.lstrip().startswith("+") ] return self._find_caplog_in_def(added_lines=added_caplog_lines) diff --git a/dev/breeze/src/airflow_breeze/utils/version_utils.py b/dev/breeze/src/airflow_breeze/utils/version_utils.py index eabee9a4b44ef..d6e290097838f 100644 --- a/dev/breeze/src/airflow_breeze/utils/version_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/version_utils.py @@ -17,28 +17,6 @@ from __future__ import annotations -def get_latest_helm_chart_version(): - import requests - - response = requests.get("https://airflow.apache.org/_gen/packages-metadata.json") - data = response.json() - for package in data: - if package["package-name"] == "helm-chart": - stable_version = package["stable-version"] - return stable_version - - -def 
get_latest_airflow_version(): - import requests - - response = requests.get( - "https://pypi.org/pypi/apache-airflow/json", headers={"User-Agent": "Python requests"} - ) - response.raise_for_status() - latest_released_version = response.json()["info"]["version"] - return latest_released_version - - def remove_local_version_suffix(version_suffix: str) -> str: if "+" in version_suffix: return version_suffix.split("+")[0] diff --git a/dev/breeze/tests/test_packages.py b/dev/breeze/tests/test_packages.py index da23d0e4d207f..420761a2995e6 100644 --- a/dev/breeze/tests/test_packages.py +++ b/dev/breeze/tests/test_packages.py @@ -36,8 +36,6 @@ get_long_package_name, get_min_airflow_version, get_pip_package_name, - get_previous_documentation_distribution_path, - get_previous_source_providers_distribution_path, get_provider_details, get_provider_info_dict, get_provider_requirements, @@ -47,7 +45,7 @@ get_suspended_provider_ids, validate_provider_info_with_runtime_schema, ) -from airflow_breeze.utils.path_utils import AIRFLOW_ROOT_PATH, DOCS_ROOT +from airflow_breeze.utils.path_utils import AIRFLOW_ROOT_PATH def test_get_available_packages(): @@ -55,19 +53,6 @@ def test_get_available_packages(): assert all(package not in REGULAR_DOC_PACKAGES for package in get_available_distributions()) -def test_get_source_package_path(): - assert get_previous_source_providers_distribution_path("apache.hdfs") == AIRFLOW_ROOT_PATH.joinpath( - "providers", "src", "airflow", "providers", "apache", "hdfs" - ) - - -def test_get_old_documentation_package_path(): - assert ( - get_previous_documentation_distribution_path("apache.hdfs") - == DOCS_ROOT / "apache-airflow-providers-apache-hdfs" - ) - - def test_expand_all_provider_distributions(): assert len(expand_all_provider_distributions(("all-providers",))) > 70 @@ -258,14 +243,13 @@ def test_convert_pip_requirements_to_table(requirements: Iterable[str], markdown def test_validate_provider_info_with_schema(): for provider in 
get_available_distributions(): - print("Validating provider:", provider) validate_provider_info_with_runtime_schema(get_provider_info_dict(provider)) @pytest.mark.parametrize( "provider_id, min_version", [ - ("amazon", "2.9.0"), + ("amazon", "2.10.0"), ("fab", "3.0.2"), ], ) diff --git a/dev/breeze/tests/test_provider_documentation.py b/dev/breeze/tests/test_provider_documentation.py index c7035a8ffb61d..43a058c998112 100644 --- a/dev/breeze/tests/test_provider_documentation.py +++ b/dev/breeze/tests/test_provider_documentation.py @@ -33,6 +33,7 @@ _get_change_from_line, _get_changes_classified, _get_git_log_command, + get_most_impactful_change, get_version_tag, ) @@ -317,3 +318,76 @@ def test_version_bump_for_provider_documentation(initial_version, bump_index, ex result = bump_version(Version(initial_version), bump_index) assert str(result) == expected_version + + +@pytest.mark.parametrize( + "changes, expected", + [ + pytest.param([TypeOfChange.SKIP], TypeOfChange.SKIP, id="only-skip"), + pytest.param([TypeOfChange.DOCUMENTATION], TypeOfChange.DOCUMENTATION, id="only-doc"), + pytest.param([TypeOfChange.MISC], TypeOfChange.MISC, id="only-misc"), + pytest.param([TypeOfChange.BUGFIX], TypeOfChange.BUGFIX, id="only-bugfix"), + pytest.param( + [TypeOfChange.MIN_AIRFLOW_VERSION_BUMP], + TypeOfChange.MIN_AIRFLOW_VERSION_BUMP, + id="only-min-airflow-bump", + ), + pytest.param([TypeOfChange.FEATURE], TypeOfChange.FEATURE, id="only-feature"), + pytest.param([TypeOfChange.BREAKING_CHANGE], TypeOfChange.BREAKING_CHANGE, id="only-breaking"), + pytest.param( + [TypeOfChange.SKIP, TypeOfChange.DOCUMENTATION], TypeOfChange.DOCUMENTATION, id="doc-vs-skip" + ), + pytest.param([TypeOfChange.SKIP, TypeOfChange.MISC], TypeOfChange.MISC, id="misc-vs-skip"), + pytest.param([TypeOfChange.DOCUMENTATION, TypeOfChange.MISC], TypeOfChange.MISC, id="misc-vs-doc"), + pytest.param([TypeOfChange.MISC, TypeOfChange.BUGFIX], TypeOfChange.BUGFIX, id="bugfix-vs-misc"), + pytest.param( + 
[TypeOfChange.BUGFIX, TypeOfChange.MIN_AIRFLOW_VERSION_BUMP], + TypeOfChange.MIN_AIRFLOW_VERSION_BUMP, + id="bump-vs-bugfix", + ), + pytest.param( + [TypeOfChange.MIN_AIRFLOW_VERSION_BUMP, TypeOfChange.FEATURE], + TypeOfChange.FEATURE, + id="feature-vs-bump", + ), + pytest.param( + [TypeOfChange.FEATURE, TypeOfChange.BREAKING_CHANGE], + TypeOfChange.BREAKING_CHANGE, + id="breaking-vs-feature", + ), + # Bigger combos + pytest.param( + [ + TypeOfChange.SKIP, + TypeOfChange.DOCUMENTATION, + TypeOfChange.MISC, + TypeOfChange.BUGFIX, + TypeOfChange.MIN_AIRFLOW_VERSION_BUMP, + TypeOfChange.FEATURE, + TypeOfChange.BREAKING_CHANGE, + ], + TypeOfChange.BREAKING_CHANGE, + id="full-spectrum", + ), + pytest.param( + [ + TypeOfChange.DOCUMENTATION, + TypeOfChange.BUGFIX, + TypeOfChange.MIN_AIRFLOW_VERSION_BUMP, + ], + TypeOfChange.MIN_AIRFLOW_VERSION_BUMP, + id="version-bump-over-bugfix-doc", + ), + pytest.param( + [ + TypeOfChange.DOCUMENTATION, + TypeOfChange.MISC, + TypeOfChange.SKIP, + ], + TypeOfChange.MISC, + id="misc-over-doc-skip", + ), + ], +) +def test_get_most_impactful_change(changes, expected): + assert get_most_impactful_change(changes) == expected diff --git a/dev/breeze/tests/test_publish_docs_to_s3.py b/dev/breeze/tests/test_publish_docs_to_s3.py new file mode 100644 index 0000000000000..86c1edbe567d2 --- /dev/null +++ b/dev/breeze/tests/test_publish_docs_to_s3.py @@ -0,0 +1,271 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from unittest.mock import MagicMock, PropertyMock, mock_open, patch + +import pytest + +from airflow_breeze.utils.publish_docs_to_s3 import S3DocsPublish + + +class TestPublishDocsToS3: + def setup_method(self): + self.publish_docs_to_s3 = S3DocsPublish( + source_dir_path="source_dir_path", + destination_location="destination_location", + exclude_docs="exclude_docs", + dry_run=False, + overwrite=False, + parallelism=1, + ) + + @patch("os.listdir") + def test_get_all_docs(self, mock_listdir): + mock_listdir.return_value = ["apache-airflow-providers-amazon"] + assert self.publish_docs_to_s3.get_all_docs == ["apache-airflow-providers-amazon"] + + def test_get_all_docs_exception(self): + with patch("os.listdir", side_effect=FileNotFoundError): + with pytest.raises(SystemExit): + self.publish_docs_to_s3.get_all_docs() + + def test_get_all_excluded_docs(self): + self.publish_docs_to_s3.exclude_docs = "amazon,google,apache-airflow" + assert self.publish_docs_to_s3.get_all_excluded_docs == ["amazon", "google", "apache-airflow"] + + @patch("os.listdir") + def test_get_all_eligible_docs(self, mock_listdir): + mock_listdir.return_value = [ + "apache-airflow-providers-amazon", + "apache-airflow-providers-google", + "apache-airflow", + "docker-stack", + "apache-airflow-providers-apache-kafka", + "apache-airflow-providers-apache-cassandra", + "helm-chart", + ] + + self.publish_docs_to_s3.exclude_docs = "amazon,docker-stack,apache.kafka" + + assert sorted(self.publish_docs_to_s3.get_all_eligible_docs) == sorted( + [ + 
"apache-airflow-providers-google", + "apache-airflow", + "apache-airflow-providers-apache-cassandra", + "helm-chart", + ] + ) + + @patch("os.listdir") + def test_get_all_eligible_docs_should_raise_when_empty(self, mock_listdir): + mock_listdir.return_value = [ + "apache-airflow-providers-amazon", + "apache-airflow", + "apache-airflow-providers-apache-kafka", + ] + self.publish_docs_to_s3.exclude_docs = "amazon,apache-airflow,apache.kafka" + + with pytest.raises(SystemExit): + self.publish_docs_to_s3.get_all_eligible_docs + + @pytest.mark.parametrize( + "all_eligible_docs, doc_exists, overwrite, expected_source_dest_mapping", + [ + ( + ["apache-airflow-providers-amazon", "apache-airflow-providers-google", "apache-airflow"], + False, + False, + [ + ( + "/tmp/docs-archive/apache-airflow-providers-amazon/1.0.0/", + "s3://dummy-docs/docs/apache-airflow-providers-amazon/1.0.0/", + ), + ( + "/tmp/docs-archive/apache-airflow-providers-amazon/1.0.0/", + "s3://dummy-docs/docs/apache-airflow-providers-amazon/stable/", + ), + ( + "/tmp/docs-archive/apache-airflow-providers-google/1.0.0/", + "s3://dummy-docs/docs/apache-airflow-providers-google/1.0.0/", + ), + ( + "/tmp/docs-archive/apache-airflow-providers-google/1.0.0/", + "s3://dummy-docs/docs/apache-airflow-providers-google/stable/", + ), + ("/tmp/docs-archive/apache-airflow/1.0.0/", "s3://dummy-docs/docs/apache-airflow/1.0.0/"), + ( + "/tmp/docs-archive/apache-airflow/1.0.0/", + "s3://dummy-docs/docs/apache-airflow/stable/", + ), + ], + ), + ( + ["apache-airflow-providers-amazon", "apache-airflow-providers-google", "apache-airflow"], + True, + False, + [], + ), + ( + ["apache-airflow-providers-amazon", "apache-airflow-providers-google", "apache-airflow"], + True, + True, + [ + ( + "/tmp/docs-archive/apache-airflow-providers-amazon/1.0.0/", + "s3://dummy-docs/docs/apache-airflow-providers-amazon/1.0.0/", + ), + ( + "/tmp/docs-archive/apache-airflow-providers-amazon/1.0.0/", + 
"s3://dummy-docs/docs/apache-airflow-providers-amazon/stable/", + ), + ( + "/tmp/docs-archive/apache-airflow-providers-google/1.0.0/", + "s3://dummy-docs/docs/apache-airflow-providers-google/1.0.0/", + ), + ( + "/tmp/docs-archive/apache-airflow-providers-google/1.0.0/", + "s3://dummy-docs/docs/apache-airflow-providers-google/stable/", + ), + ("/tmp/docs-archive/apache-airflow/1.0.0/", "s3://dummy-docs/docs/apache-airflow/1.0.0/"), + ( + "/tmp/docs-archive/apache-airflow/1.0.0/", + "s3://dummy-docs/docs/apache-airflow/stable/", + ), + ], + ), + ( + [], + True, + False, + [], + ), + ], + ids=[ + "no_doc_version_exists_in_destination", + "doc_version_exists_in_destination", + "overwrite_existing_doc", + "no_docs_to_publish", + ], + ) + @patch.object(S3DocsPublish, "run_publish") + @patch("builtins.open", new_callable=mock_open, read_data="1.0.0") + @patch.object(S3DocsPublish, "get_all_eligible_docs", new_callable=PropertyMock) + @patch("os.path.exists") + @patch.object(S3DocsPublish, "doc_exists") + def test_publish_stable_version_docs( + self, + mock_doc_exists, + mock_path_exists, + mock_get_all_eligible_docs, + mock_open, + mock_run_publish, + all_eligible_docs, + doc_exists, + overwrite, + expected_source_dest_mapping, + ): + mock_path_exists.return_value = True + mock_doc_exists.return_value = doc_exists + mock_get_all_eligible_docs.return_value = all_eligible_docs + self.publish_docs_to_s3.overwrite = overwrite + self.publish_docs_to_s3.source_dir_path = "/tmp/docs-archive" + self.publish_docs_to_s3.destination_location = "s3://dummy-docs/docs" + mock_run_publish.return_value = MagicMock() + self.publish_docs_to_s3.publish_stable_version_docs() + + assert self.publish_docs_to_s3.source_dest_mapping == expected_source_dest_mapping + + @pytest.mark.parametrize( + "all_eligible_docs, doc_exists, overwrite, expected_source_dest_mapping", + [ + ( + ["apache-airflow-providers-amazon", "apache-airflow-providers-google", "apache-airflow"], + False, + False, + [ + ( + 
"/tmp/docs-archive/apache-airflow-providers-amazon/", + "s3://dummy-docs/docs/apache-airflow-providers-amazon/", + ), + ( + "/tmp/docs-archive/apache-airflow-providers-google/", + "s3://dummy-docs/docs/apache-airflow-providers-google/", + ), + ("/tmp/docs-archive/apache-airflow/", "s3://dummy-docs/docs/apache-airflow/"), + ], + ), + ( + ["apache-airflow-providers-amazon", "apache-airflow-providers-google", "apache-airflow"], + True, + False, + [], + ), + ( + ["apache-airflow-providers-amazon", "apache-airflow-providers-google", "apache-airflow"], + True, + True, + [ + ( + "/tmp/docs-archive/apache-airflow-providers-amazon/", + "s3://dummy-docs/docs/apache-airflow-providers-amazon/", + ), + ( + "/tmp/docs-archive/apache-airflow-providers-google/", + "s3://dummy-docs/docs/apache-airflow-providers-google/", + ), + ("/tmp/docs-archive/apache-airflow/", "s3://dummy-docs/docs/apache-airflow/"), + ], + ), + ( + [], + True, + False, + [], + ), + ], + ids=[ + "no_doc_version_exists_in_destination", + "doc_version_exists_in_destination", + "overwrite_existing_doc", + "no_docs_to_publish", + ], + ) + @patch.object(S3DocsPublish, "run_publish") + @patch.object(S3DocsPublish, "get_all_eligible_docs", new_callable=PropertyMock) + @patch.object(S3DocsPublish, "doc_exists") + def test_publish_all_docs( + self, + mock_doc_exists, + mock_get_all_eligible_docs, + mock_run_publish, + all_eligible_docs, + doc_exists, + overwrite, + expected_source_dest_mapping, + ): + mock_doc_exists.return_value = doc_exists + mock_get_all_eligible_docs.return_value = all_eligible_docs + + self.publish_docs_to_s3.overwrite = overwrite + self.publish_docs_to_s3.source_dir_path = "/tmp/docs-archive" + self.publish_docs_to_s3.destination_location = "s3://dummy-docs/docs" + mock_run_publish.return_value = MagicMock() + self.publish_docs_to_s3.publish_all_docs() + + assert self.publish_docs_to_s3.source_dest_mapping == expected_source_dest_mapping diff --git a/dev/breeze/tests/test_selective_checks.py 
b/dev/breeze/tests/test_selective_checks.py index 00acb4634f356..3adeb4810a840 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -347,48 +347,6 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="All tests should run when API test files change", ) ), - ( - pytest.param( - ("providers/standard/src/airflow/providers/standard/operators/python.py",), - { - "selected-providers-list-as-string": None, - "all-python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", - "all-python-versions-list-as-string": DEFAULT_PYTHON_MAJOR_MINOR_VERSION, - "python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", - "python-versions-list-as-string": DEFAULT_PYTHON_MAJOR_MINOR_VERSION, - "ci-image-build": "true", - "prod-image-build": "false", - "needs-helm-tests": "false", - "run-tests": "true", - "run-amazon-tests": "false", - "docs-build": "true", - "skip-pre-commits": ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, - "upgrade-to-newer-dependencies": "false", - "core-test-types-list-as-strings-in-json": json.dumps( - [{"description": "Always", "test_types": "Always"}] - ), - "providers-test-types-list-as-strings-in-json": json.dumps( - [ - { - "description": "common.compat...standard", - "test_types": "Providers[common.compat] Providers[standard]", - } - ] - ), - "individual-providers-test-types-list-as-strings-in-json": json.dumps( - [ - { - "description": "common.compat...standard", - "test_types": "Providers[common.compat] Providers[standard]", - } - ] - ), - "needs-mypy": "true", - "mypy-checks": "['mypy-providers']", - }, - id="Only Python tests", - ) - ), ( pytest.param( ("airflow-core/src/airflow/serialization/python.py",), @@ -461,7 +419,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "needs-helm-tests": "false", "run-tests": "true", "run-amazon-tests": "false", - "docs-build": "false", + "docs-build": "true", "skip-pre-commits": 
ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, "run-kubernetes-tests": "false", "upgrade-to-newer-dependencies": "false", @@ -505,7 +463,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "needs-helm-tests": "false", "run-tests": "true", "run-amazon-tests": "false", - "docs-build": "false", + "docs-build": "true", "skip-pre-commits": ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, "run-kubernetes-tests": "false", "upgrade-to-newer-dependencies": "false", @@ -553,7 +511,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "needs-helm-tests": "false", "run-tests": "true", "run-amazon-tests": "false", - "docs-build": "false", + "docs-build": "true", "skip-pre-commits": ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, "run-kubernetes-tests": "false", "upgrade-to-newer-dependencies": "false", @@ -601,7 +559,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "needs-helm-tests": "false", "run-tests": "true", "run-amazon-tests": "false", - "docs-build": "false", + "docs-build": "true", "skip-pre-commits": ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, "run-kubernetes-tests": "false", "upgrade-to-newer-dependencies": "false", @@ -769,7 +727,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "test_types": "Providers[amazon] Providers[apache.livy]", }, {"description": "dbt.cloud", "test_types": "Providers[dbt.cloud]"}, - {"description": "dingding", "test_types": "Providers[dingding]"}, + { + "description": "dingding", + "test_types": "Providers[dingding]", + }, {"description": "discord", "test_types": "Providers[discord]"}, {"description": "http", "test_types": "Providers[http]"}, ] @@ -939,9 +900,11 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): id="Providers tests run including amazon tests if amazon provider files changed", ), pytest.param( - ("providers/airbyte/tests/airbyte/__init__.py",), + 
("providers/amazon/src/airflow/providers/amazon/pyproject.toml",), { - "selected-providers-list-as-string": "airbyte", + "selected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " + "common.compat common.messaging common.sql exasol ftp google http imap microsoft.azure " + "mongo mysql openlineage postgres salesforce ssh teradata", "all-python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", "all-python-versions-list-as-string": DEFAULT_PYTHON_MAJOR_MINOR_VERSION, "python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", @@ -950,24 +913,33 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "prod-image-build": "false", "needs-helm-tests": "false", "run-tests": "true", - "run-amazon-tests": "false", - "docs-build": "false", - "skip-pre-commits": ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, + "docs-build": "true", + # no python files changed so flynt should not run + "skip-pre-commits": "flynt," + ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, "run-kubernetes-tests": "false", "upgrade-to-newer-dependencies": "false", + "run-amazon-tests": "true", "core-test-types-list-as-strings-in-json": json.dumps( [{"description": "Always", "test_types": "Always"}] ), "providers-test-types-list-as-strings-in-json": json.dumps( - [{"description": "airbyte", "test_types": "Providers[airbyte]"}] + [ + { + "description": "amazon...google", + "test_types": "Providers[amazon] Providers[apache.hive,cncf.kubernetes," + "common.compat,common.messaging,common.sql,exasol,ftp,http,imap," + "microsoft.azure,mongo,mysql,openlineage,postgres,salesforce,ssh,teradata] " + "Providers[google]", + } + ] ), "needs-mypy": "true", "mypy-checks": "['mypy-providers']", }, - id="Providers tests run without amazon tests if no amazon file changed", + id="Providers tests run including amazon tests if only amazon pyproject.toml files changed", ), pytest.param( - ("providers/amazon/src/airflow/providers/amazon/file.py",), + 
("providers/amazon/src/airflow/providers/amazon/provider.yaml",), { "selected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.compat common.messaging common.sql exasol ftp google http imap microsoft.azure " @@ -980,11 +952,12 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "prod-image-build": "false", "needs-helm-tests": "false", "run-tests": "true", - "run-amazon-tests": "true", "docs-build": "true", - "skip-pre-commits": ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, + # no python files changed so flynt should not run + "skip-pre-commits": "flynt," + ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, "run-kubernetes-tests": "false", "upgrade-to-newer-dependencies": "false", + "run-amazon-tests": "true", "core-test-types-list-as-strings-in-json": json.dumps( [{"description": "Always", "test_types": "Always"}] ), @@ -1002,16 +975,12 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "needs-mypy": "true", "mypy-checks": "['mypy-providers']", }, - id="Providers tests run including amazon tests if amazon provider files changed", + id="Providers tests run including amazon tests if only amazon provider.yaml files changed", ), pytest.param( - ( - "airflow-core/tests/unit/always/test_project_structure.py", - "providers/common/io/tests/operators/__init__.py", - "providers/common/io/tests/operators/test_file_transfer.py", - ), + ("providers/airbyte/tests/airbyte/__init__.py",), { - "selected-providers-list-as-string": "common.compat common.io openlineage", + "selected-providers-list-as-string": "airbyte", "all-python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", "all-python-versions-list-as-string": DEFAULT_PYTHON_MAJOR_MINOR_VERSION, "python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", @@ -1021,30 +990,27 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "needs-helm-tests": "false", "run-tests": "true", "run-amazon-tests": "false", - 
"docs-build": "false", - "run-kubernetes-tests": "false", + "docs-build": "true", "skip-pre-commits": ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, + "run-kubernetes-tests": "false", "upgrade-to-newer-dependencies": "false", "core-test-types-list-as-strings-in-json": json.dumps( [{"description": "Always", "test_types": "Always"}] ), "providers-test-types-list-as-strings-in-json": json.dumps( - [ - { - "description": "common.compat,common.io,openl", - "test_types": "Providers[common.compat,common.io,openlineage]", - } - ] + [{"description": "airbyte", "test_types": "Providers[airbyte]"}] ), "needs-mypy": "true", - "mypy-checks": "['mypy-airflow-core', 'mypy-providers']", + "mypy-checks": "['mypy-providers']", }, - id="Only Always and common providers tests should run when only common.io and tests/always changed", + id="Providers tests run without amazon tests if no amazon file changed", ), pytest.param( - ("providers/standard/src/airflow/providers/standard/operators/bash.py",), + ("providers/amazon/src/airflow/providers/amazon/file.py",), { - "selected-providers-list-as-string": "common.compat standard", + "selected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " + "common.compat common.messaging common.sql exasol ftp google http imap microsoft.azure " + "mongo mysql openlineage postgres salesforce ssh teradata", "all-python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", "all-python-versions-list-as-string": DEFAULT_PYTHON_MAJOR_MINOR_VERSION, "python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", @@ -1053,31 +1019,38 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "prod-image-build": "false", "needs-helm-tests": "false", "run-tests": "true", - "run-amazon-tests": "false", + "run-amazon-tests": "true", "docs-build": "true", - "run-kubernetes-tests": "false", "skip-pre-commits": ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, + "run-kubernetes-tests": "false", "upgrade-to-newer-dependencies": "false", 
"core-test-types-list-as-strings-in-json": json.dumps( - [{"description": "Always...Serialization", "test_types": "Always Core Serialization"}] + [{"description": "Always", "test_types": "Always"}] ), "providers-test-types-list-as-strings-in-json": json.dumps( [ { - "description": "common.compat...standard", - "test_types": "Providers[common.compat] Providers[standard]", + "description": "amazon...google", + "test_types": "Providers[amazon] Providers[apache.hive,cncf.kubernetes," + "common.compat,common.messaging,common.sql,exasol,ftp,http,imap," + "microsoft.azure,mongo,mysql,openlineage,postgres,salesforce,ssh,teradata] " + "Providers[google]", } ] ), "needs-mypy": "true", "mypy-checks": "['mypy-providers']", }, - id="Providers standard tests and Serialization tests to run when airflow bash.py changed", + id="Providers tests run including amazon tests if amazon provider files changed", ), pytest.param( - ("providers/standard/src/airflow/providers/standard/operators/bash.py",), + ( + "airflow-core/tests/unit/always/test_project_structure.py", + "providers/common/io/tests/operators/__init__.py", + "providers/common/io/tests/operators/test_file_transfer.py", + ), { - "selected-providers-list-as-string": None, + "selected-providers-list-as-string": "common.compat common.io openlineage", "all-python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", "all-python-versions-list-as-string": DEFAULT_PYTHON_MAJOR_MINOR_VERSION, "python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", @@ -1092,20 +1065,43 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "skip-pre-commits": ALL_SKIPPED_COMMITS_IF_NO_UI_AND_HELM_TESTS, "upgrade-to-newer-dependencies": "false", "core-test-types-list-as-strings-in-json": json.dumps( - [{"description": "Always...Serialization", "test_types": "Always Core Serialization"}] + [{"description": "Always", "test_types": "Always"}] ), "providers-test-types-list-as-strings-in-json": json.dumps( [ { - 
"description": "common.compat...standard", - "test_types": "Providers[common.compat] Providers[standard]", + "description": "common.compat,common.io,openl", + "test_types": "Providers[common.compat,common.io,openlineage]", } ] ), "needs-mypy": "true", - "mypy-checks": "['mypy-providers']", + "mypy-checks": "['mypy-airflow-core', 'mypy-providers']", }, - id="Force Core and Serialization tests to run when tests bash changed", + id="Only Always and common providers tests should run when only common.io and tests/always changed", + ), + pytest.param( + ("providers/standard/src/airflow/providers/standard/operators/bash.py",), + { + "all-python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", + "all-python-versions-list-as-string": DEFAULT_PYTHON_MAJOR_MINOR_VERSION, + "python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']", + "python-versions-list-as-string": DEFAULT_PYTHON_MAJOR_MINOR_VERSION, + "ci-image-build": "true", + "prod-image-build": "true", + "needs-helm-tests": "true", + "run-tests": "true", + "run-amazon-tests": "true", + "docs-build": "true", + "run-kubernetes-tests": "true", + "skip-pre-commits": ALL_SKIPPED_COMMITS_BY_DEFAULT_ON_ALL_TESTS_NEEDED, + "upgrade-to-newer-dependencies": "false", + "core-test-types-list-as-strings-in-json": ALL_CI_SELECTIVE_TEST_TYPES_AS_JSON, + "providers-test-types-list-as-strings-in-json": ALL_PROVIDERS_SELECTIVE_TEST_TYPES_AS_JSON, + "needs-mypy": "true", + "mypy-checks": ALL_MYPY_CHECKS, + }, + id="All tests to run when standard operator changed", ), ( pytest.param( @@ -1154,8 +1150,8 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "core-test-types-list-as-strings-in-json": ALL_CI_SELECTIVE_TEST_TYPES_AS_JSON, "providers-test-types-list-as-strings-in-json": ALL_PROVIDERS_SELECTIVE_TEST_TYPES_AS_JSON, "testable-core-integrations": "['kerberos']", - "testable-providers-integrations": "['celery', 'cassandra', 'drill', 'kafka', 'mongo', " - "'pinot', 'qdrant', 'redis', 'trino', 
'ydb']", + "testable-providers-integrations": "['celery', 'cassandra', 'drill', 'gremlin', 'kafka', " + "'mongo', 'pinot', 'qdrant', 'redis', 'trino', 'ydb']", "needs-mypy": "true", "mypy-checks": ALL_MYPY_CHECKS, }, @@ -1257,6 +1253,11 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): }, id="Run docs-build for SECURITY.md", ), + pytest.param( + ("go-sdk/sdk/variable.go",), + {"run-go-sdk-tests": "true"}, + id="Run go tests for go-sdk", + ), ], ) def test_expected_output_pull_request_main( @@ -2052,7 +2053,7 @@ def test_expected_output_push( "run-tests": "true", "skip-providers-tests": "false", "docs-build": "true", - "docs-list-as-string": "apache-airflow amazon common.compat common.io common.sql " + "docs-list-as-string": "apache-airflow task-sdk amazon common.compat common.io common.sql " "dbt.cloud ftp google microsoft.mssql mysql " "openlineage postgres sftp snowflake trino", "skip-pre-commits": ALL_SKIPPED_COMMITS_ON_NO_CI_IMAGE, diff --git a/pyproject.toml b/pyproject.toml index 3002e6ab4d80d..50c98e6204bfa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -278,7 +278,7 @@ packages = [] "apache-airflow-providers-openfaas>=3.7.0" ] "openlineage" = [ - "apache-airflow-providers-openlineage>=2.1.3" # Set from MIN_VERSION_OVERRIDE in update_airflow_pyproject_toml.py + "apache-airflow-providers-openlineage>=2.3.0" # Set from MIN_VERSION_OVERRIDE in update_airflow_pyproject_toml.py ] "opensearch" = [ "apache-airflow-providers-opensearch>=1.5.0" @@ -441,7 +441,7 @@ packages = [] "apache-airflow-providers-odbc>=4.8.0", "apache-airflow-providers-openai>=1.5.0", "apache-airflow-providers-openfaas>=3.7.0", - "apache-airflow-providers-openlineage>=2.1.3", # Set from MIN_VERSION_OVERRIDE in update_airflow_pyproject_toml.py + "apache-airflow-providers-openlineage>=2.3.0", # Set from MIN_VERSION_OVERRIDE in update_airflow_pyproject_toml.py "apache-airflow-providers-opensearch>=1.5.0", "apache-airflow-providers-opsgenie>=5.8.0", 
"apache-airflow-providers-oracle>=3.12.0", diff --git a/scripts/ci/airflow_version_check.py b/scripts/ci/airflow_version_check.py index 8c040e13d024b..45354d298847f 100755 --- a/scripts/ci/airflow_version_check.py +++ b/scripts/ci/airflow_version_check.py @@ -31,7 +31,7 @@ from pathlib import Path import requests -from packaging.version import Version +from packaging.version import Version, parse from rich.console import Console console = Console(color_system="standard", stderr=True, width=400) @@ -45,7 +45,6 @@ def check_airflow_version(airflow_version: Version) -> tuple[str, bool]: returns: tuple containing the version and a boolean indicating if it's latest. """ latest = False - max_versions_shown = 30 try: response = requests.get( "https://pypi.org/pypi/apache-airflow/json", headers={"User-Agent": "Python requests"} @@ -53,12 +52,14 @@ def check_airflow_version(airflow_version: Version) -> tuple[str, bool]: response.raise_for_status() data = response.json() latest_version = Version(data["info"]["version"]) - valid_versions = list(reversed(data["releases"].keys()))[:max_versions_shown] - if str(airflow_version) not in valid_versions: - console.print(f"[red]Version {airflow_version} is not a valid Airflow version") - console.print( - f"Available versions: (first available {max_versions_shown} versions):", valid_versions - ) + all_versions = sorted( + (parse(v) for v in data["releases"].keys()), + reverse=True, + ) + if airflow_version not in all_versions: + console.print(f"[red]Version {airflow_version} is not a valid Airflow release version.") + console.print("[yellow]Available versions (latest 30 shown):") + console.print([str(v) for v in all_versions[:30]]) sys.exit(1) if airflow_version == latest_version: latest = True diff --git a/scripts/ci/docker-compose/ci-uv-tests.yml b/scripts/ci/docker-compose/ci-tests.yml similarity index 93% rename from scripts/ci/docker-compose/ci-uv-tests.yml rename to scripts/ci/docker-compose/ci-tests.yml index 
e293a8ceef9cb..3e962a055320d 100644 --- a/scripts/ci/docker-compose/ci-uv-tests.yml +++ b/scripts/ci/docker-compose/ci-tests.yml @@ -19,5 +19,5 @@ services: airflow: volumes: # We should be ok with sharing the cache between the builds - now that we are using uv - # The cache should be safe to share between parallel builds as UV is build to support it. - - /mnt/.cache:/root/.cache + # The cache should be safe to share between parallel builds. + - /mnt/.cache/uv:/root/.cache/uv diff --git a/scripts/ci/docker-compose/devcontainer.env b/scripts/ci/docker-compose/devcontainer.env index 03e301fdeb458..8f7ae95aba554 100644 --- a/scripts/ci/docker-compose/devcontainer.env +++ b/scripts/ci/docker-compose/devcontainer.env @@ -45,6 +45,7 @@ HOST_GROUP_ID= HOST_OS="linux" INIT_SCRIPT_FILE="init.sh" INSTALL_AIRFLOW_VERSION= +INSTALL_AIRFLOW_WITH_CONSTRAINTS= AIRFLOW_CONSTRAINTS_MODE= INSTALL_SELECTED_PROVIDERS= USE_AIRFLOW_VERSION= @@ -60,7 +61,6 @@ POSTGRES_VERSION=10 PYTHONDONTWRITEBYTECODE="true" REMOVE_ARM_PACKAGES="false" RUN_TESTS="false" -AIRFLOW_SKIP_CONSTRAINTS="false" SKIP_SSH_SETUP="true" SKIP_ENVIRONMENT_INITIALIZATION="false" START_AIRFLOW="false" diff --git a/scripts/ci/docker-compose/gremlin/graph.properties b/scripts/ci/docker-compose/gremlin/graph.properties new file mode 100644 index 0000000000000..e8550156de7bd --- /dev/null +++ b/scripts/ci/docker-compose/gremlin/graph.properties @@ -0,0 +1,27 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Specify the graph implementation class +gremlin.graph=org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph + +# Optional: Specify a file location for persistent storage. +# Uncomment the following line if you wish to persist graph data between restarts. +gremlin.tinkergraph.graphLocation=/opt/gremlin-server/data/graph.json + +# Configure the ID managers for vertices and edges +gremlin.tinkergraph.vertexIdManager=LONG +gremlin.tinkergraph.graphFormat=graphson diff --git a/scripts/ci/docker-compose/gremlin/gremlin-entrypoint.sh b/scripts/ci/docker-compose/gremlin/gremlin-entrypoint.sh new file mode 100644 index 0000000000000..2c58e7b518c96 --- /dev/null +++ b/scripts/ci/docker-compose/gremlin/gremlin-entrypoint.sh @@ -0,0 +1,42 @@ +#!/bin/sh + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+#!/bin/sh +set -eu + +# Fix permissions on the config directory +echo "Fixing permissions for /opt/gremlin-server/conf..." +chmod -R a+rw /opt/gremlin-server/conf +ls -la /opt/gremlin-server/conf + +# Start Gremlin Server in the background +echo "Starting Gremlin Server on port 8182..." +cd /opt/gremlin-server || exit +./bin/gremlin-server.sh conf/gremlin-server.yaml & + +# Wait for Gremlin to be ready +echo "Waiting for Gremlin Server to start on port 8182..." +while ! nc -z gremlin 8182 2>/dev/null; do + echo "Gremlin still not started" + sleep 5 +done +sleep 3 +echo "Gremlin Server is running" + +# Keep the container running +wait diff --git a/scripts/ci/docker-compose/gremlin/gremlin-server.yaml b/scripts/ci/docker-compose/gremlin/gremlin-server.yaml new file mode 100644 index 0000000000000..da67c35f900f9 --- /dev/null +++ b/scripts/ci/docker-compose/gremlin/gremlin-server.yaml @@ -0,0 +1,63 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+--- +host: gremlin +port: 8182 +channelizer: org.apache.tinkerpop.gremlin.server.channel.WebSocketChannelizer +graphs: { + graph: conf/graph.properties} +scriptEngines: { + gremlin-groovy: { + plugins: { + org.apache.tinkerpop.gremlin.server.jsr223.GremlinServerGremlinPlugin: {}, + org.apache.tinkerpop.gremlin.tinkergraph.jsr223.TinkerGraphGremlinPlugin: {}, + org.apache.tinkerpop.gremlin.jsr223.ImportGremlinPlugin: { + classImports: [java.lang.Math], methodImports: [java.lang.Math#*] + }, + org.apache.tinkerpop.gremlin.jsr223.ScriptFileGremlinPlugin: { + files: [scripts/empty-sample.groovy] + }}}} +serializers: + - {className: org.apache.tinkerpop.gremlin.driver.ser.GraphSONMessageSerializerV2d0, + config: {includeTypes: true}} + - {className: org.apache.tinkerpop.gremlin.driver.ser.GraphSONMessageSerializerV3d0, + config: {ioRegistries: [org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerIoRegistryV3d0]}} + - {className: org.apache.tinkerpop.gremlin.driver.ser.GraphBinaryMessageSerializerV1} + - {className: org.apache.tinkerpop.gremlin.driver.ser.GraphBinaryMessageSerializerV1, + config: {serializeResultToString: true}} +processors: + - {className: org.apache.tinkerpop.gremlin.server.op.session.SessionOpProcessor, + config: {sessionTimeout: 28800000}} + - {className: org.apache.tinkerpop.gremlin.server.op.traversal.TraversalOpProcessor, + config: {cacheExpirationTime: 600000, cacheMaxSize: 1000}} +metrics: { + consoleReporter: {enabled: true, interval: 180000}, + csvReporter: {enabled: true, interval: 180000, fileName: /tmp/gremlin-server-metrics.csv}, + jmxReporter: {enabled: true}, + slf4jReporter: {enabled: true, interval: 180000}} +strictTransactionManagement: false +idleConnectionTimeout: 0 +keepAliveInterval: 0 +maxInitialLineLength: 4096 +maxHeaderSize: 8192 +maxChunkSize: 8192 +maxContentLength: 65536 +maxAccumulationBufferComponents: 1024 +resultIterationBatchSize: 64 +writeBufferLowWaterMark: 32768 +writeBufferHighWaterMark: 65536 +ssl: {enabled: 
false} diff --git a/scripts/ci/docker-compose/gremlin/log4j-server.properties b/scripts/ci/docker-compose/gremlin/log4j-server.properties new file mode 100644 index 0000000000000..e2d23083842ae --- /dev/null +++ b/scripts/ci/docker-compose/gremlin/log4j-server.properties @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +log4j.rootLogger=INFO, stdout +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%p] %C{1} - %m%n + +log4j.logger.org.apache.tinkerpop.gremlin.driver.Connection=OFF +log4j.logger.org.apache.tinkerpop.gremlin.driver.ConnectionPool=OFF +log4j.logger.org.apache.tinkerpop.gremlin.neo4j.structure.Neo4jGraph=ERROR +log4j.logger.org.apache.hadoop.mapred.JobClient=INFO +log4j.logger.org.apache.hadoop.mapreduce.Job=INFO +log4j.logger.org.apache.tinkerpop.gremlin.hadoop.process.computer.mapreduce.MapReduceGraphComputer=INFO +log4j.logger.org.apache.tinkerpop.gremlin.hadoop.structure.HadoopGraph=INFO +log4j.logger.org.apache.tinkerpop.gremlin.spark.process.computer.SparkGraphComputer=INFO +log4j.logger.org.apache.spark.metrics.MetricsSystem=ERROR +log4j.logger.com.jcabi.manifests.Manifests=OFF diff --git a/scripts/ci/docker-compose/gremlin/logback.xml b/scripts/ci/docker-compose/gremlin/logback.xml new file mode 100644 index 0000000000000..9b85d870a5225 --- /dev/null +++ b/scripts/ci/docker-compose/gremlin/logback.xml @@ -0,0 +1,38 @@ + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + + + + + diff --git a/scripts/ci/docker-compose/integration-gremlin.yml b/scripts/ci/docker-compose/integration-gremlin.yml new file mode 100644 index 0000000000000..27e9783999afd --- /dev/null +++ b/scripts/ci/docker-compose/integration-gremlin.yml @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +--- +services: + gremlin: + hostname: gremlin + container_name: gremlin + image: tinkerpop/gremlin-server:3.4.2 + labels: + breeze.description: "Integration required for gremlin operator and hook." + volumes: + - ./gremlin:/opt/gremlin-server/conf + - graph-data:/opt/gremlin-server/data + - ./gremlin/gremlin-entrypoint.sh:/opt/gremlin-server/gremlin-entrypoint.sh # New entrypoint script + ports: + - "${GREMLIN_HOST_PORT}:8182" + entrypoint: /opt/gremlin-server/gremlin-entrypoint.sh # Use custom entrypoint + user: "0:0" # Run as root + airflow: + depends_on: + - gremlin + environment: + - INTEGRATION_GREMLIN=true + stdin_open: true +volumes: + graph-data: diff --git a/scripts/ci/install_breeze.sh b/scripts/ci/install_breeze.sh index c3b987b67976a..d2ef488d1cca1 100755 --- a/scripts/ci/install_breeze.sh +++ b/scripts/ci/install_breeze.sh @@ -22,7 +22,7 @@ cd "$( dirname "${BASH_SOURCE[0]}" )/../../" PYTHON_ARG="" PIP_VERSION="25.1.1" -UV_VERSION="0.7.8" +UV_VERSION="0.7.14" if [[ ${PYTHON_VERSION=} != "" ]]; then PYTHON_ARG="--python=$(which python"${PYTHON_VERSION}") " fi diff --git a/scripts/ci/pre_commit/boring_cyborg.py b/scripts/ci/pre_commit/boring_cyborg.py index ec674485b5457..95fb1f057c032 100755 --- a/scripts/ci/pre_commit/boring_cyborg.py +++ b/scripts/ci/pre_commit/boring_cyborg.py @@ -31,13 +31,14 @@ CONFIG_KEY = "labelPRBasedOnFilePath" -repo_root = Path(__file__).parent.parent.parent.parent +repo_root = Path(__file__).parents[3] cyborg_config_path = repo_root / ".github" / "boring-cyborg.yml" cyborg_config = 
yaml.safe_load(cyborg_config_path.read_text()) if CONFIG_KEY not in cyborg_config: raise SystemExit(f"Missing section {CONFIG_KEY}") errors = [] +# Check if all patterns in the cyborg config are existing in the repository for label, patterns in cyborg_config[CONFIG_KEY].items(): for pattern in patterns: try: @@ -49,9 +50,32 @@ f"Unused pattern [{colored(pattern, 'cyan')}] in [{colored(yaml_path, 'cyan')}] section." ) +# Check for missing providers +EXCEPTIONS = ["edge3"] +providers_root = repo_root / "providers" +for p in providers_root.glob("**/provider.yaml"): + provider_name = str(p.parent.relative_to(providers_root)).replace("/", "-") + expected_key = f"provider:{provider_name}" + if provider_name not in EXCEPTIONS and expected_key not in cyborg_config[CONFIG_KEY]: + errors.append( + f"Provider [{colored(provider_name, 'cyan')}] is missing in [{colored(expected_key, 'cyan')}] section." + ) + +# Check for missing translations +EXCEPTIONS = ["en"] +for p in repo_root.glob("airflow-core/src/airflow/ui/public/i18n/locales/*"): + if p.is_dir(): + lang_id = p.name + expected_key = f"translation:{lang_id}" + if lang_id not in EXCEPTIONS and expected_key not in cyborg_config[CONFIG_KEY]: + errors.append( + f"Translation [{colored(lang_id, 'cyan')}] is missing in [{colored(expected_key, 'cyan')}] section." + ) + if errors: print(f"Found {colored(str(len(errors)), 'red')} problems:") print("\n".join(errors)) + print(f"Please correct the above in {cyborg_config_path}") sys.exit(1) else: print("No found problems. 
Have a good day!") diff --git a/scripts/ci/pre_commit/breeze_cmd_line.py b/scripts/ci/pre_commit/breeze_cmd_line.py index 9e248c9a7b6cd..5e61a586d08c9 100755 --- a/scripts/ci/pre_commit/breeze_cmd_line.py +++ b/scripts/ci/pre_commit/breeze_cmd_line.py @@ -24,10 +24,9 @@ from pathlib import Path sys.path.insert(0, str(Path(__file__).parent.resolve())) -from common_precommit_utils import console, initialize_breeze_precommit +from common_precommit_utils import AIRFLOW_ROOT_PATH, console, initialize_breeze_precommit -AIRFLOW_SOURCES_DIR = Path(__file__).parents[3].resolve() -BREEZE_INSTALL_DIR = AIRFLOW_SOURCES_DIR / "dev" / "breeze" +BREEZE_INSTALL_DIR = AIRFLOW_ROOT_PATH / "dev" / "breeze" BREEZE_DOC_DIR = BREEZE_INSTALL_DIR / "doc" BREEZE_IMAGES_DIR = BREEZE_DOC_DIR / "images" BREEZE_SOURCES_DIR = BREEZE_INSTALL_DIR / "src" diff --git a/scripts/ci/pre_commit/capture_airflowctl_help.py b/scripts/ci/pre_commit/capture_airflowctl_help.py new file mode 100644 index 0000000000000..3e3ddc11d90ff --- /dev/null +++ b/scripts/ci/pre_commit/capture_airflowctl_help.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import sys +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).parent.resolve())) +from common_precommit_utils import ( + initialize_breeze_precommit, + run_command_via_breeze_shell, + validate_cmd_result, +) + +initialize_breeze_precommit(__name__, __file__) + +cmd_result = run_command_via_breeze_shell( + ["python3", "/opt/airflow/scripts/in_container/run_capture_airflowctl_help.py"], + backend="postgres", + skip_environment_initialization=False, +) + +validate_cmd_result(cmd_result) diff --git a/scripts/ci/pre_commit/check_airflow_bug_report_template.py b/scripts/ci/pre_commit/check_airflow_bug_report_template.py index e4bda765cc498..badc8fec658fa 100755 --- a/scripts/ci/pre_commit/check_airflow_bug_report_template.py +++ b/scripts/ci/pre_commit/check_airflow_bug_report_template.py @@ -26,7 +26,7 @@ sys.path.insert(0, str(Path(__file__).parent.resolve())) # make sure common_precommit_utils is imported from common_precommit_utils import AIRFLOW_ROOT_PATH, check_list_sorted, console -BUG_REPORT_TEMPLATE = AIRFLOW_ROOT_PATH / ".github" / "ISSUE_TEMPLATE" / "airflow_providers_bug_report.yml" +BUG_REPORT_TEMPLATE = AIRFLOW_ROOT_PATH / ".github" / "ISSUE_TEMPLATE" / "3-airflow_providers_bug_report.yml" DEPENDENCIES_JSON_FILE_PATH = AIRFLOW_ROOT_PATH / "generated" / "provider_dependencies.json" diff --git a/scripts/ci/pre_commit/check_deferrable_default.py b/scripts/ci/pre_commit/check_deferrable_default.py index a49aed4a6ed64..1bddc96c96b0c 100755 --- a/scripts/ci/pre_commit/check_deferrable_default.py +++ b/scripts/ci/pre_commit/check_deferrable_default.py @@ -106,8 +106,8 @@ def _fix_invalid_deferrable_default_value(module_filename: str) -> None: def main() -> int: modules = itertools.chain( - glob.glob(f"{ROOT_DIR}/airflow/**/sensors/**.py", recursive=True), - glob.glob(f"{ROOT_DIR}/airflow/**/operators/**.py", recursive=True), + glob.glob(f"{ROOT_DIR}/**/sensors/**.py", recursive=True), + 
glob.glob(f"{ROOT_DIR}/**/operators/**.py", recursive=True), ) errors = [error for module in modules for error in iter_check_deferrable_default_errors(module)] diff --git a/scripts/ci/pre_commit/check_extra_packages_ref.py b/scripts/ci/pre_commit/check_extra_packages_ref.py index f04d66caef6c0..5ba9e68e6e75f 100755 --- a/scripts/ci/pre_commit/check_extra_packages_ref.py +++ b/scripts/ci/pre_commit/check_extra_packages_ref.py @@ -26,6 +26,7 @@ import sys from pathlib import Path +from common_precommit_utils import AIRFLOW_ROOT_PATH from tabulate import tabulate try: @@ -34,7 +35,6 @@ import tomli as tomllib -AIRFLOW_ROOT_PATH = Path(__file__).parents[3].resolve() COMMON_PRECOMMIT_PATH = Path(__file__).parent.resolve() EXTRA_PACKAGES_REF_FILE = AIRFLOW_ROOT_PATH / "airflow-core" / "docs" / "extra-packages-ref.rst" PYPROJECT_TOML_FILE_PATH = AIRFLOW_ROOT_PATH / "pyproject.toml" diff --git a/scripts/ci/pre_commit/check_i18n_json.py b/scripts/ci/pre_commit/check_i18n_json.py new file mode 100644 index 0000000000000..31051092e4edb --- /dev/null +++ b/scripts/ci/pre_commit/check_i18n_json.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python3 +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +""" +Pre-commit script to check that all .json files in airflow-core/src/airflow/ui/public/i18n/locales/ +are valid JSON and do not contain any 'TODO:' entries. +""" + +from __future__ import annotations + +import json +import sys +from pathlib import Path + +COMMON_PRECOMMIT_PATH = Path(__file__).parent.resolve() + +sys.path.insert(0, COMMON_PRECOMMIT_PATH.as_posix()) # make sure common_precommit_utils is imported +from common_precommit_utils import AIRFLOW_ROOT_PATH, console + +LOCALES_DIR = AIRFLOW_ROOT_PATH / "airflow-core" / "src" / "airflow" / "ui" / "public" / "i18n" / "locales" + + +def main(): + failed = False + for json_file in LOCALES_DIR.rglob("*.json"): + console.print(f"[bright_blue]Checking {json_file.relative_to(LOCALES_DIR)}[/]") + rel_path = json_file.relative_to(Path.cwd()) if Path.cwd() in json_file.parents else json_file + try: + content = json_file.read_text(encoding="utf-8") + if "TODO:" in content: + console.print(f"[bold red][FAIL][/bold red] 'TODO:' found in [yellow]{rel_path}[/yellow]") + failed = True + # Check if valid JSON + try: + json.loads(content) + except Exception as e: + console.print( + f"[bold red][FAIL][/bold red] Invalid JSON in [yellow]{rel_path}[/yellow]: [red]{e}[/red]" + ) + failed = True + except Exception as e: + console.print( + f"[bold red][FAIL][/bold red] Could not read [yellow]{rel_path}[/yellow]: [red]{e}[/red]" + ) + failed = True + if failed: + console.print( + "\n[bold red][ERROR][/bold red] Some JSON files are invalid or contain 'TODO:'. Commit aborted." 
+ ) + sys.exit(1) + console.print("[bold green][OK][/bold green] All JSON files are valid and do not contain 'TODO:'.") + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/scripts/ci/pre_commit/check_imports_in_providers.py b/scripts/ci/pre_commit/check_imports_in_providers.py index cecf6e9e4ead6..4ddf91c3070fb 100755 --- a/scripts/ci/pre_commit/check_imports_in_providers.py +++ b/scripts/ci/pre_commit/check_imports_in_providers.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -17,86 +17,22 @@ # under the License. from __future__ import annotations -import json -import os.path -import subprocess import sys from pathlib import Path sys.path.insert(0, str(Path(__file__).parent.resolve())) from common_precommit_utils import ( - AIRFLOW_PROVIDERS_ROOT_PATH, - AIRFLOW_ROOT_PATH, - console, - get_provider_base_dir_from_path, - get_provider_id_from_path, + initialize_breeze_precommit, + run_command_via_breeze_shell, + validate_cmd_result, ) -errors_found = False +initialize_breeze_precommit(__name__, __file__) +cmd_result = run_command_via_breeze_shell( + ["python3", "/opt/airflow/scripts/in_container/run_check_imports_in_providers.py"], + backend="postgres", + skip_environment_initialization=False, +) -def check_imports(folders_to_check: list[Path]): - global errors_found - cmd = [ - "ruff", - "analyze", - "graph", - *[ - folder_to_check.as_posix() - for folder_to_check in folders_to_check - if (folder_to_check.parent / "pyproject.toml").exists() - ], - ] - console.print("Cmd", cmd) - import_tree_str = subprocess.check_output(cmd) - import_tree = json.loads(import_tree_str) - # Uncomment these if you want to debug strange dependencies and see if ruff gets it right - console.print("Dependencies discovered by ruff:") - console.print(import_tree) - - for 
importing_file in sys.argv[1:]: - if not importing_file.startswith("providers/"): - console.print(f"[yellow]Skipping non-provider file: {importing_file}") - continue - importing_file_path = Path(importing_file) - console.print(importing_file_path) - imported_files_array = import_tree.get(importing_file, None) - if imported_files_array is None: - continue - imported_file_paths = [Path(file) for file in imported_files_array] - for imported_file_path in imported_file_paths: - if imported_file_path.name == "version_compat.py": - # Note - this will check also imports from other places - not only from providers - # Which means that import from tests_common, and airflow will be also banned - common_path = os.path.commonpath([importing_file, imported_file_path.as_posix()]) - imported_file_parent_dir = imported_file_path.parent.as_posix() - if common_path != imported_file_parent_dir: - provider_id = get_provider_id_from_path(importing_file_path) - provider_dir = get_provider_base_dir_from_path(importing_file_path) - console.print( - f"\n[red]Invalid import of `version_compat` module in provider {provider_id} in:\n" - ) - console.print(f"[yellow]{importing_file_path}") - console.print( - f"\n[bright_blue]The AIRFLOW_V_X_Y_PLUS import should be " - f"from the {provider_id} provider root directory ({provider_dir}), but it is currently from:" - ) - console.print(f"\n[yellow]{imported_file_path}\n") - console.print( - f"1. Copy `version_compat`.py to `{provider_dir}/version_compat.py` if not there.\n" - f"2. 
Import the version constants you need as:\n\n" - f"[yellow]from airflow.providers.{provider_id}.version_compat import ...[/]\n" - f"\n" - ) - errors_found = True - - -find_all_source_providers = AIRFLOW_PROVIDERS_ROOT_PATH.rglob("**/src/") - -check_imports([*find_all_source_providers, AIRFLOW_ROOT_PATH / "tests_common"]) - -if errors_found: - console.print("\n[red]Errors found in imports![/]\n") - sys.exit(1) -else: - console.print("\n[green]All version_compat imports are correct![/]\n") +validate_cmd_result(cmd_result) diff --git a/scripts/ci/pre_commit/check_kubeconform.py b/scripts/ci/pre_commit/check_kubeconform.py index 569983089ed64..dd1ce01c07cc1 100755 --- a/scripts/ci/pre_commit/check_kubeconform.py +++ b/scripts/ci/pre_commit/check_kubeconform.py @@ -23,7 +23,7 @@ from pathlib import Path sys.path.insert(0, str(Path(__file__).parent.resolve())) -from common_precommit_utils import console, initialize_breeze_precommit +from common_precommit_utils import AIRFLOW_ROOT_PATH, console, initialize_breeze_precommit initialize_breeze_precommit(__name__, __file__) @@ -32,12 +32,11 @@ console.print("[red]\nError while setting up k8s environment.") sys.exit(res_setup.returncode) -AIRFLOW_SOURCES_DIR = Path(__file__).parents[3].resolve() -HELM_BIN_PATH = AIRFLOW_SOURCES_DIR / ".venv" / "bin" / "helm" +HELM_BIN_PATH = AIRFLOW_ROOT_PATH / ".venv" / "bin" / "helm" ps = subprocess.Popen( [os.fspath(HELM_BIN_PATH), "template", ".", "-f", "values.yaml"], - cwd=AIRFLOW_SOURCES_DIR / "chart", + cwd=AIRFLOW_ROOT_PATH / "chart", stdout=subprocess.PIPE, ) result = subprocess.run( diff --git a/scripts/ci/pre_commit/check_license.py b/scripts/ci/pre_commit/check_license.py index f08b496d538a6..eef501d42689a 100755 --- a/scripts/ci/pre_commit/check_license.py +++ b/scripts/ci/pre_commit/check_license.py @@ -21,18 +21,18 @@ import os import subprocess import sys -from pathlib import Path -AIRFLOW_SOURCES = Path(__file__).parents[3].resolve() +from common_precommit_utils import 
AIRFLOW_ROOT_PATH + # This is the target of a symlink in airflow/www/static/docs - # and rat exclude doesn't cope with the symlink target doesn't exist -os.makedirs(AIRFLOW_SOURCES / "docs" / "_build" / "html", exist_ok=True) +os.makedirs(AIRFLOW_ROOT_PATH / "docs" / "_build" / "html", exist_ok=True) cmd = [ "docker", "run", "-v", - f"{AIRFLOW_SOURCES}:/opt/airflow", + f"{AIRFLOW_ROOT_PATH}:/opt/airflow", "-t", "--user", f"{os.getuid()}:{os.getgid()}", diff --git a/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py b/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py index e8aa2d63914f5..aba7ef6a8aa0a 100755 --- a/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py +++ b/scripts/ci/pre_commit/check_system_tests_hidden_in_index.py @@ -36,7 +36,7 @@ def check_system_test_entry_hidden(provider_index: Path): console.print(f"[bright_blue]Checking {provider_index}") - provider_path = provider_index.parent.parent.resolve().relative_to(AIRFLOW_PROVIDERS_ROOT_PATH) + provider_path = provider_index.parents[1].resolve().relative_to(AIRFLOW_PROVIDERS_ROOT_PATH) expected_text = f""" .. 
toctree:: :hidden: diff --git a/scripts/ci/pre_commit/compile_fab_assets.py b/scripts/ci/pre_commit/compile_fab_assets.py index 58f4c9fde36e8..94165ccbc325a 100755 --- a/scripts/ci/pre_commit/compile_fab_assets.py +++ b/scripts/ci/pre_commit/compile_fab_assets.py @@ -37,9 +37,9 @@ FAB_PROVIDER_WWW_HASH_FILE = FAB_PROVIDER_ROOT_PATH / "www-hash.txt" -def get_directory_hash(directory: Path, skip_path_regexp: str | None = None) -> str: +def get_directory_hash(directory: Path, skip_path_regexps: list[str]) -> str: files = sorted(directory.rglob("*")) - if skip_path_regexp: + for skip_path_regexp in skip_path_regexps: matcher = re.compile(skip_path_regexp) files = [file for file in files if not matcher.match(os.fspath(file.resolve()))] sha = hashlib.sha256() @@ -57,19 +57,24 @@ def get_directory_hash(directory: Path, skip_path_regexp: str | None = None) -> INTERNAL_SERVER_ERROR = "500 Internal Server Error" +SKIP_PATH_REGEXPS = [".*/node_modules.*"] -def compile_assets(www_directory: Path, www_hash_file_name: str): - node_modules_directory = www_directory / "node_modules" + +def compile_assets(www_directory: Path): dist_directory = www_directory / "static" / "dist" FAB_PROVIDER_WWW_HASH_FILE.parent.mkdir(exist_ok=True, parents=True) - if node_modules_directory.exists() and dist_directory.exists(): - old_hash = FAB_PROVIDER_WWW_HASH_FILE.read_text() if FAB_PROVIDER_WWW_HASH_FILE.exists() else "" - new_hash = get_directory_hash(www_directory, skip_path_regexp=r".*node_modules.*") + if dist_directory.exists(): + old_hash = ( + FAB_PROVIDER_WWW_HASH_FILE.read_text().strip() if FAB_PROVIDER_WWW_HASH_FILE.exists() else "" + ) + new_hash = get_directory_hash(www_directory, skip_path_regexps=SKIP_PATH_REGEXPS) if new_hash == old_hash: print(f"The '{www_directory}' directory has not changed! 
Skip regeneration.") return + print("The directory has changed, regenerating assets.") + print("Old hash: " + old_hash) + print("New hash: " + new_hash) else: - shutil.rmtree(node_modules_directory, ignore_errors=True) shutil.rmtree(dist_directory, ignore_errors=True) env = os.environ.copy() env["FORCE_COLOR"] = "true" @@ -88,10 +93,11 @@ def compile_assets(www_directory: Path, www_hash_file_name: str): print(result.stdout + "\n" + result.stderr) sys.exit(result.returncode) subprocess.check_call(["yarn", "run", "build"], cwd=os.fspath(www_directory), env=env) - new_hash = get_directory_hash(www_directory, skip_path_regexp=r".*node_modules.*") + new_hash = get_directory_hash(www_directory, skip_path_regexps=SKIP_PATH_REGEXPS) FAB_PROVIDER_WWW_HASH_FILE.write_text(new_hash + "\n") + print(f"Assets compiled successfully. New hash: {new_hash}") if __name__ == "__main__": # Compile assets for fab provider - compile_assets(FAB_PROVIDER_WWW_PATH, "hash_fab.txt") + compile_assets(FAB_PROVIDER_WWW_PATH) diff --git a/scripts/ci/pre_commit/generate_pypi_readme.py b/scripts/ci/pre_commit/generate_pypi_readme.py index c3699b65e0eda..f8298c049ea99 100755 --- a/scripts/ci/pre_commit/generate_pypi_readme.py +++ b/scripts/ci/pre_commit/generate_pypi_readme.py @@ -18,9 +18,9 @@ from __future__ import annotations import re -from pathlib import Path -AIRFLOW_SOURCES = Path(__file__).parents[3].resolve() +from common_precommit_utils import AIRFLOW_ROOT_PATH + README_SECTIONS_TO_EXTRACT = [ "Apache Airflow", "Requirements", @@ -57,9 +57,9 @@ def extract_section(content, section_name): if __name__ == "__main__": - readme_file = AIRFLOW_SOURCES / "README.md" - pypi_readme_file = AIRFLOW_SOURCES / "generated" / "PYPI_README.md" - license_file = AIRFLOW_SOURCES / "scripts" / "ci" / "license-templates" / "LICENSE.md" + readme_file = AIRFLOW_ROOT_PATH / "README.md" + pypi_readme_file = AIRFLOW_ROOT_PATH / "generated" / "PYPI_README.md" + license_file = AIRFLOW_ROOT_PATH / "scripts" / "ci" / 
"license-templates" / "LICENSE.md" readme_content = readme_file.read_text() generated_pypi_readme_content = license_file.read_text() + "\n" + PYPI_README_HEADER diff --git a/scripts/ci/pre_commit/inline_scripts_in_docker.py b/scripts/ci/pre_commit/inline_scripts_in_docker.py index ece43801e1db1..36777b9c11669 100755 --- a/scripts/ci/pre_commit/inline_scripts_in_docker.py +++ b/scripts/ci/pre_commit/inline_scripts_in_docker.py @@ -19,7 +19,7 @@ from pathlib import Path -AIRFLOW_SOURCES_DIR = Path(__file__).parents[3].resolve() +from common_precommit_utils import AIRFLOW_ROOT_PATH def insert_content(file_path: Path, content: list[str], header: str, footer: str, file_name: str): @@ -39,9 +39,9 @@ def insert_content(file_path: Path, content: list[str], header: str, footer: str if __name__ == "__main__": - DOCKERFILE_FILE = AIRFLOW_SOURCES_DIR / "Dockerfile" - DOCKERFILE_CI_FILE = AIRFLOW_SOURCES_DIR / "Dockerfile.ci" - SCRIPTS_DOCKER_DIR = AIRFLOW_SOURCES_DIR / "scripts" / "docker" + DOCKERFILE_FILE = AIRFLOW_ROOT_PATH / "Dockerfile" + DOCKERFILE_CI_FILE = AIRFLOW_ROOT_PATH / "Dockerfile.ci" + SCRIPTS_DOCKER_DIR = AIRFLOW_ROOT_PATH / "scripts" / "docker" for file in [DOCKERFILE_FILE, DOCKERFILE_CI_FILE]: for script in SCRIPTS_DOCKER_DIR.iterdir(): diff --git a/scripts/ci/pre_commit/lint_dockerfile.py b/scripts/ci/pre_commit/lint_dockerfile.py index ec4941023c2a3..259e5c97e62f8 100755 --- a/scripts/ci/pre_commit/lint_dockerfile.py +++ b/scripts/ci/pre_commit/lint_dockerfile.py @@ -20,9 +20,9 @@ import subprocess import sys -from pathlib import Path -AIRFLOW_SOURCES = Path(__file__).parents[3].resolve() +from common_precommit_utils import AIRFLOW_ROOT_PATH + docker_files = [f"/root/{name}" for name in sys.argv[1:]] print(sys.argv) @@ -30,7 +30,7 @@ "docker", "run", "-v", - f"{AIRFLOW_SOURCES}:/root", + f"{AIRFLOW_ROOT_PATH}:/root", "-w", "/root", "--rm", diff --git a/scripts/ci/pre_commit/lint_helm.py b/scripts/ci/pre_commit/lint_helm.py index 
265d9c982df73..105e293d1a86a 100755 --- a/scripts/ci/pre_commit/lint_helm.py +++ b/scripts/ci/pre_commit/lint_helm.py @@ -32,13 +32,12 @@ console.print("[red]\nError while setting up k8s environment.") sys.exit(res_setup.returncode) -AIRFLOW_SOURCES_DIR = Path(__file__).parents[3].resolve() HELM_BIN_PATH = AIRFLOW_ROOT_PATH / ".venv" / "bin" / "helm" result = subprocess.run( [os.fspath(HELM_BIN_PATH), "lint", ".", "-f", "values.yaml"], check=False, - cwd=AIRFLOW_SOURCES_DIR / "chart", + cwd=AIRFLOW_ROOT_PATH / "chart", ) if res_setup.returncode != 0: console.print("[red]\nError while linting charts.") diff --git a/scripts/ci/pre_commit/prevent_deprecated_sqlalchemy_usage.py b/scripts/ci/pre_commit/prevent_deprecated_sqlalchemy_usage.py new file mode 100644 index 0000000000000..a6357978426e0 --- /dev/null +++ b/scripts/ci/pre_commit/prevent_deprecated_sqlalchemy_usage.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import ast +import sys +from pathlib import Path + +from rich.console import Console + +console = Console(color_system="standard", width=200) + + +def check_session_query(mod: ast.Module) -> int: + errors = False + for node in ast.walk(mod): + if isinstance(node, ast.Call) and isinstance(node.func, ast.Attribute): + if ( + node.func.attr == "query" + and isinstance(node.func.value, ast.Name) + and node.func.value.id == "session" + ): + console.print( + f"\nUse of legacy `session.query` detected on line {node.lineno}. " + f"\nSQLAlchemy 2.0 deprecates the `Query` object" + f"use the `select()` construct instead." + ) + errors = True + return errors + + +def main(): + for file in sys.argv[1:]: + file_path = Path(file) + ast_module = ast.parse(file_path.read_text(encoding="utf-8"), file) + errors = check_session_query(ast_module) + return 1 if errors else 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/ci/pre_commit/sort_in_the_wild.py b/scripts/ci/pre_commit/sort_in_the_wild.py index 64b1cdb257c86..59099ea3acdf9 100755 --- a/scripts/ci/pre_commit/sort_in_the_wild.py +++ b/scripts/ci/pre_commit/sort_in_the_wild.py @@ -19,7 +19,8 @@ import re import sys -from pathlib import Path + +from common_precommit_utils import AIRFLOW_ROOT_PATH if __name__ not in ("__main__", "__mp_main__"): raise SystemExit( @@ -28,7 +29,6 @@ ) -AIRFLOW_SOURCES = Path(__file__).parents[3].resolve() NUMBER_MATCH = re.compile(r"(^\d+\.)") @@ -37,7 +37,7 @@ def stable_sort(x): if __name__ == "__main__": - inthewild_path = Path(AIRFLOW_SOURCES) / "INTHEWILD.md" + inthewild_path = AIRFLOW_ROOT_PATH / "INTHEWILD.md" content = inthewild_path.read_text() header = [] companies = [] diff --git a/scripts/ci/pre_commit/sort_installed_providers.py b/scripts/ci/pre_commit/sort_installed_providers.py index c97addf50178b..8a702e2f21210 100755 --- a/scripts/ci/pre_commit/sort_installed_providers.py +++ 
b/scripts/ci/pre_commit/sort_installed_providers.py @@ -19,6 +19,8 @@ from pathlib import Path +from common_precommit_utils import AIRFLOW_ROOT_PATH + if __name__ not in ("__main__", "__mp_main__"): raise SystemExit( "This file is intended to be executed as an executable program. You cannot use it as a module." @@ -26,9 +28,6 @@ ) -AIRFLOW_SOURCES = Path(__file__).parents[3].resolve() - - def stable_sort(x): return x.casefold(), x @@ -44,5 +43,5 @@ def sort_file(path: Path): if __name__ == "__main__": - prod_image_installed_providers_path = AIRFLOW_SOURCES / "prod_image_installed_providers.txt" + prod_image_installed_providers_path = AIRFLOW_ROOT_PATH / "prod_image_installed_providers.txt" sort_file(prod_image_installed_providers_path) diff --git a/scripts/ci/pre_commit/sort_spelling_wordlist.py b/scripts/ci/pre_commit/sort_spelling_wordlist.py index 41d7a3ce428d2..5e2ebbb04e98b 100755 --- a/scripts/ci/pre_commit/sort_spelling_wordlist.py +++ b/scripts/ci/pre_commit/sort_spelling_wordlist.py @@ -17,7 +17,7 @@ # under the License. 
from __future__ import annotations -from pathlib import Path +from common_precommit_utils import AIRFLOW_ROOT_PATH if __name__ not in ("__main__", "__mp_main__"): raise SystemExit( @@ -26,9 +26,6 @@ ) -AIRFLOW_SOURCES = Path(__file__).parents[3].resolve() - - def stable_sort(x): return x.casefold(), x @@ -38,7 +35,7 @@ def sort_uniq(sequence): if __name__ == "__main__": - spelling_wordlist_path = Path(AIRFLOW_SOURCES) / "docs" / "spelling_wordlist.txt" + spelling_wordlist_path = AIRFLOW_ROOT_PATH / "docs" / "spelling_wordlist.txt" content = spelling_wordlist_path.read_text().splitlines(keepends=True) sorted_content = sort_uniq(content) spelling_wordlist_path.write_text("".join(sorted_content)) diff --git a/scripts/ci/pre_commit/supported_versions.py b/scripts/ci/pre_commit/supported_versions.py index 613924095892d..2f972a62c4ba7 100755 --- a/scripts/ci/pre_commit/supported_versions.py +++ b/scripts/ci/pre_commit/supported_versions.py @@ -21,7 +21,7 @@ from tabulate import tabulate -AIRFLOW_SOURCES = Path(__file__).resolve().parent.parent.parent.parent +AIRFLOW_SOURCES = Path(__file__).resolve().parents[3] HEADERS = ( diff --git a/scripts/ci/pre_commit/ts_compile_lint_simple_auth_manager_ui.py b/scripts/ci/pre_commit/ts_compile_lint_simple_auth_manager_ui.py index 301509269d889..1a9e0ecc4d0c4 100755 --- a/scripts/ci/pre_commit/ts_compile_lint_simple_auth_manager_ui.py +++ b/scripts/ci/pre_commit/ts_compile_lint_simple_auth_manager_ui.py @@ -54,8 +54,8 @@ if any("/openapi/" in file for file in original_files): run_command(["pnpm", "codegen"], cwd=dir) if all_non_yaml_files: - run_command(["pnpm", "prettier", "--write", *all_non_yaml_files], cwd=dir) run_command(["pnpm", "eslint", "--fix", *all_non_yaml_files], cwd=dir) + run_command(["pnpm", "prettier", "--write", *all_non_yaml_files], cwd=dir) if all_ts_files: with temporary_tsc_project(dir / "tsconfig.app.json", all_ts_files) as tsc_project: run_command(["pnpm", "tsc", "--p", tsc_project.name], cwd=dir) diff --git 
a/scripts/ci/pre_commit/ts_compile_lint_ui.py b/scripts/ci/pre_commit/ts_compile_lint_ui.py index 6288806ba7194..3e817285e2230 100755 --- a/scripts/ci/pre_commit/ts_compile_lint_ui.py +++ b/scripts/ci/pre_commit/ts_compile_lint_ui.py @@ -42,7 +42,7 @@ files = [ file[len(relative_dir.as_posix()) + 1 :] for file in original_files - if Path(file).is_relative_to(relative_dir) and "openapi-gen/" not in file + if Path(file).is_relative_to(relative_dir) ] all_non_yaml_files = [file for file in files if not file.endswith(".yaml")] print("All non-YAML files:", all_non_yaml_files) @@ -54,8 +54,8 @@ if any("/openapi/" in file for file in original_files): run_command(["pnpm", "codegen"], cwd=dir) if all_non_yaml_files: - run_command(["pnpm", "prettier", "--write", *all_non_yaml_files], cwd=dir) run_command(["pnpm", "eslint", "--fix", *all_non_yaml_files], cwd=dir) + run_command(["pnpm", "prettier", "--write", *all_non_yaml_files], cwd=dir) if all_ts_files: with temporary_tsc_project(dir / "tsconfig.app.json", all_ts_files) as tsc_project: run_command(["pnpm", "tsc", "--p", tsc_project.name], cwd=dir) diff --git a/scripts/ci/pre_commit/update_airflow_pyproject_toml.py b/scripts/ci/pre_commit/update_airflow_pyproject_toml.py index 6dc8853026fad..9a511a80298df 100755 --- a/scripts/ci/pre_commit/update_airflow_pyproject_toml.py +++ b/scripts/ci/pre_commit/update_airflow_pyproject_toml.py @@ -57,7 +57,7 @@ MIN_VERSION_OVERRIDE: dict[str, Version] = { "amazon": parse_version("2.1.3"), "fab": parse_version("2.2.0"), - "openlineage": parse_version("2.1.3"), + "openlineage": parse_version("2.3.0"), "git": parse_version("0.0.2"), "common.messaging": parse_version("1.0.1"), } diff --git a/scripts/ci/pre_commit/update_black_version.py b/scripts/ci/pre_commit/update_black_version.py index 5e4fda5ed379c..b7fae35572324 100755 --- a/scripts/ci/pre_commit/update_black_version.py +++ b/scripts/ci/pre_commit/update_black_version.py @@ -18,15 +18,12 @@ from __future__ import annotations import re 
-from pathlib import Path import yaml - -AIRFLOW_SOURCES = Path(__file__).parents[3].resolve() - +from common_precommit_utils import AIRFLOW_ROOT_PATH if __name__ == "__main__": - PRE_COMMIT_CONFIG_FILE = AIRFLOW_SOURCES / ".pre-commit-config.yaml" + PRE_COMMIT_CONFIG_FILE = AIRFLOW_ROOT_PATH / ".pre-commit-config.yaml" pre_commit_content = yaml.safe_load(PRE_COMMIT_CONFIG_FILE.read_text()) for repo in pre_commit_content["repos"]: if repo["repo"] == "https://github.com/psf/black": diff --git a/scripts/ci/pre_commit/update_installers_and_pre_commit.py b/scripts/ci/pre_commit/update_installers_and_pre_commit.py index ccc75e13ebc1e..ef1b745f9fe5d 100755 --- a/scripts/ci/pre_commit/update_installers_and_pre_commit.py +++ b/scripts/ci/pre_commit/update_installers_and_pre_commit.py @@ -50,6 +50,7 @@ (AIRFLOW_ROOT_PATH / ".github" / "actions" / "install-pre-commit" / "action.yml", False), (AIRFLOW_ROOT_PATH / "dev/" / "breeze" / "doc" / "ci" / "02_images.md", True), (AIRFLOW_ROOT_PATH / ".pre-commit-config.yaml", False), + (AIRFLOW_ROOT_PATH / ".github" / "workflows" / "ci-amd.yml", False), (AIRFLOW_CORE_ROOT_PATH / "pyproject.toml", False), ] @@ -100,7 +101,7 @@ class Quoting(Enum): (re.compile(r"(\| *`AIRFLOW_UV_VERSION` *\| *)(`[0-9.]+`)( *\|)"), Quoting.REVERSE_SINGLE_QUOTED), ( re.compile( - r"(default: \")([0-9.]+)(\" # Keep this comment to " + r"(\")([0-9.]+)(\" # Keep this comment to " r"allow automatic replacement of uv version)" ), Quoting.UNQUOTED, @@ -123,7 +124,7 @@ class Quoting(Enum): ), ( re.compile( - r"(default: \")([0-9.]+)(\" # Keep this comment to allow automatic " + r"(\")([0-9.]+)(\" # Keep this comment to allow automatic " r"replacement of pre-commit version)" ), Quoting.UNQUOTED, @@ -142,7 +143,7 @@ class Quoting(Enum): ), ( re.compile( - r"(default: \")([0-9.]+)(\" # Keep this comment to allow automatic " + r"(\")([0-9.]+)(\" # Keep this comment to allow automatic " r"replacement of pre-commit-uv version)" ), Quoting.UNQUOTED, diff --git 
a/scripts/ci/testing/run_breeze_command_with_retries.sh b/scripts/ci/testing/run_breeze_command_with_retries.sh deleted file mode 100755 index 6044fe7d1f3f9..0000000000000 --- a/scripts/ci/testing/run_breeze_command_with_retries.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# If you want different number of retries for your breeze command, please set NUMBER_OF_ATTEMPT environment variable. -# Default number of retries is 3 unless NUMBER_OF_ATTEMPT is set. -export COLOR_RED=$'\e[31m' -export COLOR_YELLOW=$'\e[33m' -export COLOR_RESET=$'\e[0m' - -NUMBER_OF_ATTEMPT="${NUMBER_OF_ATTEMPT:-3}" - -for i in $(seq 1 "$NUMBER_OF_ATTEMPT") ; do - breeze down - set +e - if breeze "$@"; then - set -e - exit 0 - else - echo - echo "${COLOR_YELLOW}Breeze Command failed. Retrying again.${COLOR_RESET}" - echo - echo "This could be due to a flaky test, re-running once to re-check it After restarting docker." - echo "Current Attempt: ${i}, Attempt Left: $((NUMBER_OF_ATTEMPT-i))" - echo - fi - set -e - sudo service docker restart -done - -echo "{COLOR_RED} Tried ${NUMBER_OF_ATTEMPT} times but breeze command failed. Exiting... 
${COLOR_RESET}" -exit 1 diff --git a/scripts/ci/testing/run_unit_tests.sh b/scripts/ci/testing/run_unit_tests.sh index bba4055acba52..3a4069cdf0761 100755 --- a/scripts/ci/testing/run_unit_tests.sh +++ b/scripts/ci/testing/run_unit_tests.sh @@ -120,6 +120,15 @@ function task_sdk_tests() { echo "${COLOR_BLUE}Task SDK tests completed${COLOR_RESET}" } +function go_sdk_tests() { + echo "${COLOR_BLUE}Running Go SDK tests${COLOR_RESET}" + set -x + cd go-sdk + go test -v ./... + set +x + echo "${COLOR_BLUE}Go SDK tests completed${COLOR_RESET}" +} + function airflow_ctl_tests() { echo "${COLOR_BLUE}Running Airflow CTL tests${COLOR_RESET}" @@ -137,6 +146,8 @@ function run_tests() { providers_tests elif [[ "${TEST_GROUP}" == "task-sdk" ]]; then task_sdk_tests + elif [[ "${TEST_GROUP}" == "go-sdk" ]]; then + go_sdk_tests elif [[ "${TEST_GROUP}" == "airflow-ctl" ]]; then airflow_ctl_tests else diff --git a/scripts/cov/cli_coverage.py b/scripts/cov/cli_coverage.py index ca85d2e41ea15..6c23524580610 100644 --- a/scripts/cov/cli_coverage.py +++ b/scripts/cov/cli_coverage.py @@ -23,28 +23,32 @@ sys.path.insert(0, str(Path(__file__).parent.resolve())) -source_files = ["airflow/cli"] +source_files = ["airflow-core/src/airflow/cli"] -cli_files = ["tests/cli"] +cli_files = ["airflow-core/tests/unit/cli"] files_not_fully_covered = [ - "airflow/cli/cli_config.py", - "airflow/cli/cli_parser.py", - "airflow/cli/commands/config_command.py", - "airflow/cli/commands/connection_command.py", - "airflow/cli/commands/dag_command.py", - "airflow/cli/commands/dag_processor_command.py", - "airflow/cli/commands/db_command.py", - "airflow/cli/commands/info_command.py", - "airflow/cli/commands/jobs_command.py", - "airflow/cli/commands/plugins_command.py", - "airflow/cli/commands/pool_command.py", - "airflow/cli/commands/provider_command.py", - "airflow/cli/commands/scheduler_command.py", - "airflow/cli/commands/standalone_command.py", - "airflow/cli/commands/task_command.py", - 
"airflow/cli/commands/variable_command.py", - "airflow/cli/simple_table.py", + "airflow-core/src/airflow/cli/cli_config.py", + "airflow-core/src/airflow/cli/cli_parser.py", + "airflow-core/src/airflow/cli/commands/api_server_command.py", + "airflow-core/src/airflow/cli/commands/asset_command.py", + "airflow-core/src/airflow/cli/commands/backfill_command.py", + "airflow-core/src/airflow/cli/commands/config_command.py", + "airflow-core/src/airflow/cli/commands/connection_command.py", + "airflow-core/src/airflow/cli/commands/daemon_utils.py", + "airflow-core/src/airflow/cli/commands/dag_command.py", + "airflow-core/src/airflow/cli/commands/db_command.py", + "airflow-core/src/airflow/cli/commands/info_command.py", + "airflow-core/src/airflow/cli/commands/jobs_command.py", + "airflow-core/src/airflow/cli/commands/plugins_command.py", + "airflow-core/src/airflow/cli/commands/pool_command.py", + "airflow-core/src/airflow/cli/commands/provider_command.py", + "airflow-core/src/airflow/cli/commands/rotate_fernet_key_command.py", + "airflow-core/src/airflow/cli/commands/standalone_command.py", + "airflow-core/src/airflow/cli/commands/task_command.py", + "airflow-core/src/airflow/cli/commands/variable_command.py", + "airflow-core/src/airflow/cli/simple_table.py", + "airflow-core/src/airflow/cli/utils.py", ] if __name__ == "__main__": diff --git a/scripts/cov/core_coverage.py b/scripts/cov/core_coverage.py index 0366fdbed200a..317aca4d18826 100644 --- a/scripts/cov/core_coverage.py +++ b/scripts/cov/core_coverage.py @@ -24,141 +24,137 @@ sys.path.insert(0, str(Path(__file__).parent.resolve())) source_files = [ - "airflow/executors", - "airflow/jobs", - "airflow/models", - "airflow/serialization", - "airflow/ti_deps", - "airflow/utils", + "airflow-core/src/airflow/executors", + "airflow-core/src/airflow/jobs", + "airflow-core/src/airflow/models", + "airflow-core/src/airflow/serialization", + "airflow-core/src/airflow/ti_deps", + "airflow-core/src/airflow/utils", ] 
files_not_fully_covered = [ # executors - "airflow/executors/executor_loader.py", - "airflow/executors/local_executor.py", - "airflow/executors/sequential_executor.py", + "airflow-core/src/airflow/executors/base_executor.py", + "airflow-core/src/airflow/executors/executor_loader.py", + "airflow-core/src/airflow/executors/local_executor.py", # jobs - "airflow/jobs/backfill_job_runner.py", - "airflow/jobs/base_job_runner.py", - "airflow/jobs/dag_processor_job_runner.py", - "airflow/jobs/job.py", - "airflow/jobs/local_task_job_runner.py", - "airflow/jobs/scheduler_job_runner.py", - "airflow/jobs/triggerer_job_runner.py", + "airflow-core/src/airflow/jobs/base_job_runner.py", + "airflow-core/src/airflow/jobs/dag_processor_job_runner.py", + "airflow-core/src/airflow/jobs/job.py", + "airflow-core/src/airflow/jobs/scheduler_job_runner.py", + "airflow-core/src/airflow/jobs/triggerer_job_runner.py", # models - "airflow/models/abstractoperator.py", - "airflow/models/asset.py", - "airflow/models/base.py", - "airflow/models/baseoperator.py", - "airflow/models/connection.py", - "airflow/models/crypto.py", - "airflow/models/dag.py", - "airflow/models/dagbag.py", - "airflow/models/dagcode.py", - "airflow/models/dagpickle.py", - "airflow/models/dagrun.py", - "airflow/models/dagwarning.py", - "airflow/models/expandinput.py", - "airflow/models/log.py", - "airflow/models/mappedoperator.py", - "airflow/models/param.py", - "airflow/models/pool.py", - "airflow/models/renderedtifields.py", - "airflow/models/serialized_dag.py", - "airflow/models/skipmixin.py", - "airflow/models/taskinstance.py", - "airflow/models/taskinstancekey.py", - "airflow/models/taskmap.py", - "airflow/models/taskmixin.py", - "airflow/models/taskreschedule.py", - "airflow/models/trigger.py", - "airflow/models/variable.py", - "airflow/models/xcom.py", - "airflow/models/xcom_arg.py", + "airflow-core/src/airflow/models/asset.py", + "airflow-core/src/airflow/models/backfill.py", + 
"airflow-core/src/airflow/models/base.py", + "airflow-core/src/airflow/models/baseoperator.py", + "airflow-core/src/airflow/models/connection.py", + "airflow-core/src/airflow/models/crypto.py", + "airflow-core/src/airflow/models/dag.py", + "airflow-core/src/airflow/models/dag_version.py", + "airflow-core/src/airflow/models/dagbag.py", + "airflow-core/src/airflow/models/dagcode.py", + "airflow-core/src/airflow/models/dagrun.py", + "airflow-core/src/airflow/models/dagwarning.py", + "airflow-core/src/airflow/models/deadline.py", + "airflow-core/src/airflow/models/expandinput.py", + "airflow-core/src/airflow/models/log.py", + "airflow-core/src/airflow/models/mappedoperator.py", + "airflow-core/src/airflow/models/param.py", + "airflow-core/src/airflow/models/pool.py", + "airflow-core/src/airflow/models/renderedtifields.py", + "airflow-core/src/airflow/models/serialized_dag.py", + "airflow-core/src/airflow/models/taskinstance.py", + "airflow-core/src/airflow/models/taskinstancehistory.py", + "airflow-core/src/airflow/models/taskinstancekey.py", + "airflow-core/src/airflow/models/taskmap.py", + "airflow-core/src/airflow/models/taskmixin.py", + "airflow-core/src/airflow/models/trigger.py", + "airflow-core/src/airflow/models/variable.py", + "airflow-core/src/airflow/models/xcom.py", + "airflow-core/src/airflow/models/xcom_arg.py", # serialization - "airflow/serialization/json_schema.py", - "airflow/serialization/pydantic/dag_run.py", - "airflow/serialization/pydantic/job.py", - "airflow/serialization/pydantic/taskinstance.py", - "airflow/serialization/serde.py", - "airflow/serialization/serialized_objects.py", - "airflow/serialization/serializers/bignum.py", - "airflow/serialization/serializers/builtin.py", - "airflow/serialization/serializers/datetime.py", - "airflow/serialization/serializers/kubernetes.py", - "airflow/serialization/serializers/numpy.py", - "airflow/serialization/serializers/pandas.py", - "airflow/serialization/serializers/timezone.py", + 
"airflow-core/src/airflow/serialization/json_schema.py", + "airflow-core/src/airflow/serialization/serde.py", + "airflow-core/src/airflow/serialization/serialized_objects.py", + "airflow-core/src/airflow/serialization/serializers/bignum.py", + "airflow-core/src/airflow/serialization/serializers/builtin.py", + "airflow-core/src/airflow/serialization/serializers/datetime.py", + "airflow-core/src/airflow/serialization/serializers/deltalake.py", + "airflow-core/src/airflow/serialization/serializers/iceberg.py", + "airflow-core/src/airflow/serialization/serializers/kubernetes.py", + "airflow-core/src/airflow/serialization/serializers/numpy.py", + "airflow-core/src/airflow/serialization/serializers/pandas.py", + "airflow-core/src/airflow/serialization/serializers/timezone.py", # ti_deps - "airflow/ti_deps/deps/base_ti_dep.py", - "airflow/ti_deps/deps/exec_date_after_start_date_dep.py", - "airflow/ti_deps/deps/not_previously_skipped_dep.py", - "airflow/ti_deps/deps/pool_slots_available_dep.py", - "airflow/ti_deps/deps/prev_dagrun_dep.py", - "airflow/ti_deps/deps/task_not_running_dep.py", - "airflow/ti_deps/deps/trigger_rule_dep.py", - "airflow/ti_deps/deps/valid_state_dep.py", + "airflow-core/src/airflow/ti_deps/deps/base_ti_dep.py", + "airflow-core/src/airflow/ti_deps/deps/exec_date_after_start_date_dep.py", + "airflow-core/src/airflow/ti_deps/deps/mapped_task_expanded.py", + "airflow-core/src/airflow/ti_deps/deps/not_previously_skipped_dep.py", + "airflow-core/src/airflow/ti_deps/deps/pool_slots_available_dep.py", + "airflow-core/src/airflow/ti_deps/deps/prev_dagrun_dep.py", + "airflow-core/src/airflow/ti_deps/deps/ready_to_reschedule.py", + "airflow-core/src/airflow/ti_deps/deps/runnable_exec_date_dep.py", + "airflow-core/src/airflow/ti_deps/deps/task_not_running_dep.py", + "airflow-core/src/airflow/ti_deps/deps/trigger_rule_dep.py", + "airflow-core/src/airflow/ti_deps/deps/valid_state_dep.py", # utils - "airflow/utils/cli.py", - "airflow/utils/cli_action_loggers.py", 
- "airflow/utils/code_utils.py", - "airflow/utils/context.py", - "airflow/utils/dag_cycle_tester.py", - "airflow/utils/dates.py", - "airflow/utils/db.py", - "airflow/utils/db_cleanup.py", - "airflow/utils/decorators.py", - "airflow/utils/deprecation_tools.py", - "airflow/utils/docs.py", - "airflow/utils/dot_renderer.py", - "airflow/utils/edgemodifier.py", - "airflow/utils/email.py", - "airflow/utils/entry_points.py", - "airflow/utils/file.py", - "airflow/utils/hashlib_wrapper.py", - "airflow/utils/helpers.py", - "airflow/utils/json.py", - "airflow/utils/log/action_logger.py", - "airflow/utils/log/colored_log.py", - "airflow/utils/log/file_processor_handler.py", - "airflow/utils/log/file_task_handler.py", - "airflow/utils/log/json_formatter.py", - "airflow/utils/log/log_reader.py", - "airflow/utils/log/logging_mixin.py", - "airflow/utils/log/non_caching_file_handler.py", - "airflow/utils/log/secrets_masker.py", - "airflow/utils/log/task_handler_with_custom_formatter.py", - "airflow/utils/log/trigger_handler.py", - "airflow/utils/mixins.py", - "airflow/utils/module_loading.py", - "airflow/utils/net.py", - "airflow/utils/operator_helpers.py", - "airflow/utils/operator_resources.py", - "airflow/utils/orm_event_handlers.py", - "airflow/utils/platform.py", - "airflow/utils/process_utils.py", - "airflow/utils/python_virtualenv.py", - "airflow/utils/scheduler_health.py", - "airflow/utils/serve_logs.py", - "airflow/utils/session.py", - "airflow/utils/setup_teardown.py", - "airflow/utils/sqlalchemy.py", - "airflow/utils/state.py", - "airflow/utils/strings.py", - "airflow/utils/task_group.py", - "airflow/utils/task_instance_session.py", - "airflow/utils/timeout.py", - "airflow/utils/timezone.py", - "airflow/utils/yaml.py", + "airflow-core/src/airflow/utils/cli.py", + "airflow-core/src/airflow/utils/cli_action_loggers.py", + "airflow-core/src/airflow/utils/code_utils.py", + "airflow-core/src/airflow/utils/context.py", + "airflow-core/src/airflow/utils/dag_parsing_context.py", 
+ "airflow-core/src/airflow/utils/db.py", + "airflow-core/src/airflow/utils/db_cleanup.py", + "airflow-core/src/airflow/utils/db_manager.py", + "airflow-core/src/airflow/utils/decorators.py", + "airflow-core/src/airflow/utils/deprecation_tools.py", + "airflow-core/src/airflow/utils/dot_renderer.py", + "airflow-core/src/airflow/utils/edgemodifier.py", + "airflow-core/src/airflow/utils/email.py", + "airflow-core/src/airflow/utils/entry_points.py", + "airflow-core/src/airflow/utils/file.py", + "airflow-core/src/airflow/utils/helpers.py", + "airflow-core/src/airflow/utils/json.py", + "airflow-core/src/airflow/utils/log/action_logger.py", + "airflow-core/src/airflow/utils/log/colored_log.py", + "airflow-core/src/airflow/utils/log/file_processor_handler.py", + "airflow-core/src/airflow/utils/log/file_task_handler.py", + "airflow-core/src/airflow/utils/log/json_formatter.py", + "airflow-core/src/airflow/utils/log/log_reader.py", + "airflow-core/src/airflow/utils/log/logging_mixin.py", + "airflow-core/src/airflow/utils/log/non_caching_file_handler.py", + "airflow-core/src/airflow/utils/log/task_handler_with_custom_formatter.py", + "airflow-core/src/airflow/utils/module_loading.py", + "airflow-core/src/airflow/utils/net.py", + "airflow-core/src/airflow/utils/operator_resources.py", + "airflow-core/src/airflow/utils/orm_event_handlers.py", + "airflow-core/src/airflow/utils/platform.py", + "airflow-core/src/airflow/utils/process_utils.py", + "airflow-core/src/airflow/utils/retries.py", + "airflow-core/src/airflow/utils/scheduler_health.py", + "airflow-core/src/airflow/utils/serve_logs.py", + "airflow-core/src/airflow/utils/session.py", + "airflow-core/src/airflow/utils/setup_teardown.py", + "airflow-core/src/airflow/utils/span_status.py", + "airflow-core/src/airflow/utils/sqlalchemy.py", + "airflow-core/src/airflow/utils/state.py", + "airflow-core/src/airflow/utils/strings.py", + "airflow-core/src/airflow/utils/task_group.py", + 
"airflow-core/src/airflow/utils/task_instance_session.py", + "airflow-core/src/airflow/utils/timeout.py", + "airflow-core/src/airflow/utils/timezone.py", + "airflow-core/src/airflow/utils/weight_rule.py", + "airflow-core/src/airflow/utils/yaml.py", ] core_files = [ - "tests/core", - "tests/executors", - "tests/jobs", - "tests/models", - "tests/serialization", - "tests/ti_deps", - "tests/utils", + "airflow-core/tests/unit/core", + "airflow-core/tests/unit/executors", + "airflow-core/tests/unit/jobs", + "airflow-core/tests/unit/models", + "airflow-core/tests/unit/serialization", + "airflow-core/tests/unit/ti_deps", + "airflow-core/tests/unit/utils", ] diff --git a/scripts/cov/cov_runner.py b/scripts/cov/cov_runner.py index 8db53ab019bcf..65f924d59f8c1 100644 --- a/scripts/cov/cov_runner.py +++ b/scripts/cov/cov_runner.py @@ -17,7 +17,6 @@ from __future__ import annotations import glob -import os import pytest from coverage import Coverage @@ -58,10 +57,6 @@ def run_tests(command_list, source, files_not_fully_covered): if failed: print("There are some coverage errors. Please fix them") if len(files_not_fully_covered) > 0: - print("Coverage run completed. Use the link below to see the coverage report") - breeze = os.environ.get("BREEZE", "false") - port = "8080" - if breeze.lower() == "true": - port = "28080" - print(f"http://localhost:{port}/dev/coverage/index.html") - print("You need to start the webserver before you can access the above link.") + print("Coverage run completed. 
Use the following commands to see the coverage report") + print("cd htmlcov/; python -m http.server 5555") + print("Once the server is running, open this link in your browser: http://localhost:25555") diff --git a/scripts/cov/other_coverage.py b/scripts/cov/other_coverage.py index aca0721c33d7e..914f3b047d8c2 100644 --- a/scripts/cov/other_coverage.py +++ b/scripts/cov/other_coverage.py @@ -63,8 +63,8 @@ ] other_tests = [ - "airflow-core/tests/dag_processing", - "airflow-core/tests/jobs", + "airflow-core/tests/unit/dag_processing", + "airflow-core/tests/unit/jobs", ] """ diff --git a/scripts/cov/restapi_coverage.py b/scripts/cov/restapi_coverage.py index f87ce9880c5f7..cc80db9241db6 100644 --- a/scripts/cov/restapi_coverage.py +++ b/scripts/cov/restapi_coverage.py @@ -23,7 +23,7 @@ sys.path.insert(0, str(Path(__file__).parent.resolve())) -source_files = ["airflow/api_fastapi"] +source_files = ["airflow-core/tests/unit/api_fastapi"] files_not_fully_covered: list[str] = [] diff --git a/scripts/docker/entrypoint_ci.sh b/scripts/docker/entrypoint_ci.sh index 7813c351b318d..9dcb3ee65837f 100755 --- a/scripts/docker/entrypoint_ci.sh +++ b/scripts/docker/entrypoint_ci.sh @@ -19,7 +19,6 @@ if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then set -x fi - # shellcheck source=scripts/in_container/_in_container_script_init.sh . 
"${AIRFLOW_SOURCES:-/opt/airflow}"/scripts/in_container/_in_container_script_init.sh @@ -221,7 +220,7 @@ function determine_airflow_to_use() { echo "${COLOR_BLUE}Uninstalling all packages first${COLOR_RESET}" echo # shellcheck disable=SC2086 - ${PACKAGING_TOOL_CMD} freeze | grep -ve "^-e" | grep -ve "^#" | grep -ve "^uv" | \ + ${PACKAGING_TOOL_CMD} freeze | grep -ve "^-e" | grep -ve "^#" | grep -ve "^uv" | grep -v "@" | \ xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} # Now install rich ad click first to use the installation script # shellcheck disable=SC2086 @@ -233,7 +232,9 @@ function determine_airflow_to_use() { echo # Use uv run to install necessary dependencies automatically # in the future we will be able to use uv sync when `uv.lock` is supported - uv run /opt/airflow/scripts/in_container/install_development_dependencies.py \ + # for the use in parallel runs in docker containers--no-cache is needed - otherwise there is + # possibility of overriding temporary environments by multiple parallel processes + uv run --no-cache /opt/airflow/scripts/in_container/install_development_dependencies.py \ --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt # Some packages might leave legacy typing module which causes test issues # shellcheck disable=SC2086 @@ -266,7 +267,7 @@ function check_boto_upgrade() { # shellcheck disable=SC2086 ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} aiobotocore s3fs || true # shellcheck disable=SC2086 - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade boto3 botocore + ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade "boto3<1.38.3" "botocore<1.38.3" set +x } @@ -348,23 +349,32 @@ function check_airflow_python_client_installation() { python "${IN_CONTAINER_DIR}/install_airflow_python_client.py" } +function initialize_db() { + # If we are going to start the api server OR we are a system test (which may or may not 
start the api server, + depending on the Airflow version being used to run the tests), then migrate the DB. + if [[ ${START_API_SERVER_WITH_EXAMPLES=} == "true" || ${TEST_GROUP:=""} == "system" ]]; then + echo + echo "${COLOR_BLUE}Initializing database${COLOR_RESET}" + echo + airflow db migrate + echo + echo "${COLOR_BLUE}Database initialized${COLOR_RESET}" + fi +} + function start_api_server_with_examples(){ - # check if we should not start the api server with examples by checking if both - # START_API_SERVER_WITH_EXAMPLES is false AND the TEST_GROUP env var is not equal to "system" + USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION:=""}" + # Do not start the api server if both START_API_SERVER_WITH_EXAMPLES is not "true" and the TEST_GROUP env var is not + # equal to "system". if [[ ${START_API_SERVER_WITH_EXAMPLES=} != "true" && ${TEST_GROUP:=""} != "system" ]]; then return fi + # If the Airflow version to use is set and it is < 3.0.0 (versions before 3.0.0 do not have the API server anyway) also return + if [[ ${USE_AIRFLOW_VERSION} != "" && ${USE_AIRFLOW_VERSION} < "3.0.0" ]]; then + return + fi export AIRFLOW__CORE__LOAD_EXAMPLES=True export AIRFLOW__API__EXPOSE_CONFIG=True - echo - echo "${COLOR_BLUE}Initializing database${COLOR_RESET}" - echo - airflow db migrate - echo - echo "${COLOR_BLUE}Database initialized${COLOR_RESET}" - echo - echo "${COLOR_BLUE}Parsing example dags${COLOR_RESET}" - echo airflow dags reserialize echo "Example dags parsing finished" if airflow config get-value core auth_manager | grep -q "FabAuthManager"; then @@ -401,6 +411,7 @@ check_downgrade_sqlalchemy check_downgrade_pendulum check_force_lowest_dependencies check_airflow_python_client_installation +initialize_db start_api_server_with_examples check_run_tests "${@}" diff --git a/scripts/docker/entrypoint_prod.sh b/scripts/docker/entrypoint_prod.sh index f36dd15a9dab4..cd7a600641fc0 100755 --- a/scripts/docker/entrypoint_prod.sh +++ b/scripts/docker/entrypoint_prod.sh @@ -241,7 +241,7 @@ function 
check_uid_gid() { >&2 echo " This is to make sure you can run the image with an arbitrary UID in the future." >&2 echo >&2 echo " See more about it in the Airflow's docker image documentation" - >&2 echo " http://airflow.apache.org/docs/docker-stack/entrypoint" + >&2 echo " https://airflow.apache.org/docs/docker-stack/entrypoint.html" >&2 echo # We still allow the image to run with `airflow` user. return @@ -255,7 +255,7 @@ function check_uid_gid() { >&2 echo " This is to make sure you can run the image with an arbitrary UID." >&2 echo >&2 echo " See more about it in the Airflow's docker image documentation" - >&2 echo " http://airflow.apache.org/docs/docker-stack/entrypoint" + >&2 echo " https://airflow.apache.org/docs/docker-stack/entrypoint.html" # This will not work so we fail hard exit 1 fi diff --git a/scripts/docker/install_mysql.sh b/scripts/docker/install_mysql.sh index 7795f6164ae3d..9ed49699c5ca4 100644 --- a/scripts/docker/install_mysql.sh +++ b/scripts/docker/install_mysql.sh @@ -32,6 +32,24 @@ readonly MARIADB_LTS_VERSION="10.11" : "${INSTALL_MYSQL_CLIENT:?Should be true or false}" : "${INSTALL_MYSQL_CLIENT_TYPE:-mariadb}" +retry() { + local retries=3 + local count=0 + # adding delay of 10 seconds + local delay=10 + until "$@"; do + exit_code=$? + count=$((count + 1)) + if [[ $count -lt $retries ]]; then + echo "Command failed. Attempt $count/$retries. Retrying in ${delay}s..." + sleep $delay + else + echo "Command failed after $retries attempts." 
+ return $exit_code + fi + done +} + install_mysql_client() { if [[ "${1}" == "dev" ]]; then packages=("libmysqlclient-dev" "mysql-client") @@ -57,8 +75,8 @@ install_mysql_client() { echo "deb http://repo.mysql.com/apt/debian/ $(lsb_release -cs) mysql-${MYSQL_LTS_VERSION}" > \ /etc/apt/sources.list.d/mysql.list - apt-get update - apt-get install --no-install-recommends -y "${packages[@]}" + retry apt-get update + retry apt-get install --no-install-recommends -y "${packages[@]}" apt-get autoremove -yqq --purge apt-get clean && rm -rf /var/lib/apt/lists/* @@ -102,8 +120,8 @@ install_mariadb_client() { /etc/apt/sources.list.d/mariadb.list # Make sure that dependencies from MariaDB repo are preferred over Debian dependencies printf "Package: *\nPin: release o=MariaDB\nPin-Priority: 999\n" > /etc/apt/preferences.d/mariadb - apt-get update - apt-get install --no-install-recommends -y "${packages[@]}" + retry apt-get update + retry apt-get install --no-install-recommends -y "${packages[@]}" apt-get autoremove -yqq --purge apt-get clean && rm -rf /var/lib/apt/lists/* } diff --git a/scripts/docker/pip b/scripts/docker/pip index 226adf295663c..7fdca47a52797 100644 --- a/scripts/docker/pip +++ b/scripts/docker/pip @@ -23,7 +23,7 @@ if [[ $(id -u) == "0" ]]; then echo echo "${COLOR_RED}You are running pip as root. 
Please use 'airflow' user to run pip!${COLOR_RESET}" echo - echo "${COLOR_YELLOW}See: https://airflow.apache.org/docs/docker-stack/build.html#adding-a-new-pypi-package${COLOR_RESET}" + echo "${COLOR_YELLOW}See: https://airflow.apache.org/docs/docker-stack/build.html#adding-new-pypi-packages-individually${COLOR_RESET}" echo exit 1 fi diff --git a/scripts/in_container/bin/run_tmux b/scripts/in_container/bin/run_tmux index 06ecacbe1261b..431fe3faefcdb 100755 --- a/scripts/in_container/bin/run_tmux +++ b/scripts/in_container/bin/run_tmux @@ -101,7 +101,7 @@ fi if [[ ${AIRFLOW__CORE__EXECUTOR} == "airflow.providers.edge3.executors.edge_executor.EdgeExecutor" ]]; then tmux select-pane -t 0 tmux split-window -h - tmux set-option -p @airflow_component EdgeExec + tmux set-option -p @airflow_component "Edge Worker" # Ensure we are not leaking any DB connection information to Edge Worker process tmux send-keys 'unset AIRFLOW__DATABASE__SQL_ALCHEMY_CONN' C-m diff --git a/scripts/in_container/check_environment.sh b/scripts/in_container/check_environment.sh index 49c5e7a443ab9..57ae94c1058ce 100755 --- a/scripts/in_container/check_environment.sh +++ b/scripts/in_container/check_environment.sh @@ -31,13 +31,21 @@ EXIT_CODE=0 . "$( dirname "${BASH_SOURCE[0]}" )/check_connectivity.sh" +export COLOR_YELLOW=$'\e[33m' +export COLOR_RESET=$'\e[0m' + function check_service { local label=$1 local call=$2 local max_check=${3:=1} + local initial_delay="${4:-0}" - check_service_connection "${label}" "${call}" "${max_check}" - EXIT_CODE=$? + if [[ ${initial_delay} != 0 ]]; then + echo "${COLOR_YELLOW}Adding initial delay. Waiting ${initial_delay} seconds before checking ${label}.${COLOR_RESET}" + sleep "${initial_delay}" + fi + check_service_connection "${label}" "${call}" "${max_check}" + EXIT_CODE=$? 
} function check_db_backend { @@ -174,6 +182,10 @@ if [[ ${INTEGRATION_YDB} == "true" ]]; then check_service "YDB Cluster" "run_nc ydb 2136" 50 fi +if [[ ${INTEGRATION_GREMLIN} == "true" ]]; then + check_service "gremlin" "run_nc gremlin 8182" 100 30 +fi + if [[ ${EXIT_CODE} != 0 ]]; then echo echo "Error: some of the CI environment failed to initialize!" diff --git a/scripts/in_container/in_container_utils.py b/scripts/in_container/in_container_utils.py index 2de8bd8264641..2083f00c537eb 100644 --- a/scripts/in_container/in_container_utils.py +++ b/scripts/in_container/in_container_utils.py @@ -107,3 +107,31 @@ def validate_openapi_file(file_path: Path) -> bool: print(f"[ERROR] OpenAPI validation failed for {file_path}: {e}", file=sys.stderr) sys.exit(1) return True + + +def get_provider_id_from_path(file_path: Path) -> str | None: + """ + Get the provider id from the path of the file it belongs to. + """ + for parent in file_path.parents: + # This works fine for both new and old providers structure - because we moved provider.yaml to + # the top-level of the provider and this code finding "providers" will find the "providers" package + # in old structure and "providers" directory in new structure - in both cases we can determine + # the provider id from the relative folders + if (parent / "provider.yaml").exists(): + for providers_root_candidate in parent.parents: + if providers_root_candidate.name == "providers": + return parent.relative_to(providers_root_candidate).as_posix().replace("/", ".") + else: + return None + return None + + +def get_provider_base_dir_from_path(file_path: Path) -> Path | None: + """ + Get the provider base dir (where provider.yaml is) from the path of the file it belongs to. 
+ """ + for parent in file_path.parents: + if (parent / "provider.yaml").exists(): + return parent + return None diff --git a/scripts/in_container/install_airflow_and_providers.py b/scripts/in_container/install_airflow_and_providers.py index 787fc9c6f80e4..05a437178b9c3 100755 --- a/scripts/in_container/install_airflow_and_providers.py +++ b/scripts/in_container/install_airflow_and_providers.py @@ -135,10 +135,10 @@ def get_airflow_constraints_location( airflow_package_version: str | None, github_repository: str, python_version: str, - airflow_skip_constraints: bool, + install_airflow_with_constraints: bool, ) -> str | None: """For Airflow we determine constraints in this order of preference: - * AIRFLOW_SKIP_CONSTRAINTS=true - no constraints + * INSTALL_AIRFLOW_WITH_CONSTRAINTS=false - no constraints * AIRFLOW_CONSTRAINTS_LOCATION - constraints from this location (url) * AIRFLOW_CONSTRAINTS_REFERENCE + constraints mode if specified * if we know airflow version "constraints-VERSION" + constraints mode @@ -146,7 +146,7 @@ def get_airflow_constraints_location( * constraints-main + constraints mode - as fallback """ console.print("[bright_blue]Determining airflow constraints location") - if airflow_skip_constraints: + if not install_airflow_with_constraints: console.print("[bright_blue]Skipping airflow constraints.") return None if airflow_constraints_location: @@ -213,7 +213,6 @@ class InstallationSpec(NamedTuple): airflow_core_distribution: str | None airflow_constraints_location: str | None airflow_task_sdk_distribution: str | None - airflow_task_sdk_constraints_location: str | None airflow_ctl_distribution: str | None airflow_ctl_constraints_location: str | None provider_distributions: list[str] @@ -229,7 +228,7 @@ def find_installation_spec( airflow_constraints_location: str | None, airflow_constraints_reference: str, airflow_extras: str, - airflow_skip_constraints: bool, + install_airflow_with_constraints: bool, default_constraints_branch: str, 
github_repository: str, install_selected_providers: str, @@ -283,7 +282,7 @@ def find_installation_spec( if airflow_version: console.print(f"[bright_blue]Using airflow version retrieved from package: {airflow_version}") airflow_constraints_location = get_airflow_constraints_location( - airflow_skip_constraints=airflow_skip_constraints, + install_airflow_with_constraints=install_airflow_with_constraints, airflow_constraints_mode=airflow_constraints_mode, airflow_constraints_location=airflow_constraints_location, airflow_constraints_reference=airflow_constraints_reference, @@ -296,26 +295,13 @@ def find_installation_spec( airflow_distribution_spec += airflow_extras # We always install latest task-sdk - it's independent from Airflow airflow_task_sdk_spec = find_airflow_task_sdk_package(extension) - if airflow_task_sdk_spec: - airflow_task_sdk_constraints_location = get_airflow_constraints_location( - airflow_skip_constraints=airflow_skip_constraints, - airflow_constraints_mode=airflow_constraints_mode, - airflow_constraints_location=airflow_constraints_location, - airflow_constraints_reference=airflow_constraints_reference, - airflow_package_version="main", - default_constraints_branch=default_constraints_branch, - github_repository=github_repository, - python_version=python_version, - ) - else: - airflow_task_sdk_constraints_location = None airflow_task_sdk_distribution = airflow_task_sdk_spec # We always install latest ctl - it's independent of Airflow airflow_ctl_spec = find_airflow_ctl_sdk_package(extension=extension) if airflow_ctl_spec: airflow_ctl_constraints_location = get_airflow_constraints_location( - airflow_skip_constraints=airflow_skip_constraints, + install_airflow_with_constraints=install_airflow_with_constraints, airflow_constraints_mode=airflow_constraints_mode, airflow_constraints_location=airflow_constraints_location, airflow_constraints_reference=airflow_constraints_reference, @@ -333,7 +319,6 @@ def find_installation_spec( 
airflow_core_distribution_spec = None airflow_constraints_location = None airflow_task_sdk_distribution = None - airflow_task_sdk_constraints_location = None airflow_ctl_distribution = None airflow_ctl_constraints_location = None elif use_airflow_version.startswith(ALLOWED_VCS_PROTOCOLS): @@ -345,7 +330,7 @@ def find_installation_spec( airflow_distribution_spec = use_airflow_version airflow_core_distribution_spec = use_airflow_version airflow_constraints_location = get_airflow_constraints_location( - airflow_skip_constraints=airflow_skip_constraints, + install_airflow_with_constraints=install_airflow_with_constraints, airflow_constraints_mode=airflow_constraints_mode, airflow_constraints_location=airflow_constraints_location, airflow_constraints_reference=airflow_constraints_reference, @@ -357,7 +342,7 @@ def find_installation_spec( console.print(f"\nInstalling airflow task-sdk from remote spec {use_airflow_version}\n") airflow_task_sdk_distribution = f"apache-airflow-task-sdk @ {use_airflow_version}" airflow_constraints_location = get_airflow_constraints_location( - airflow_skip_constraints=airflow_skip_constraints, + install_airflow_with_constraints=install_airflow_with_constraints, airflow_constraints_mode=airflow_constraints_mode, airflow_constraints_location=airflow_constraints_location, airflow_constraints_reference=airflow_constraints_reference, @@ -366,11 +351,10 @@ def find_installation_spec( github_repository=github_repository, python_version=python_version, ) - airflow_task_sdk_constraints_location = None console.print(f"\nInstalling airflow ctl from remote spec {use_airflow_version}\n") airflow_ctl_distribution = f"apache-airflow-ctl @ {use_airflow_version}" airflow_ctl_constraints_location = get_airflow_constraints_location( - airflow_skip_constraints=airflow_skip_constraints, + install_airflow_with_constraints=install_airflow_with_constraints, airflow_constraints_mode=airflow_constraints_mode, airflow_constraints_location=airflow_constraints_location, 
airflow_constraints_reference=airflow_constraints_reference, @@ -391,7 +375,7 @@ def find_installation_spec( f"apache-airflow-core=={use_airflow_version}" if not use_airflow_version.startswith("2") else None ) airflow_constraints_location = get_airflow_constraints_location( - airflow_skip_constraints=airflow_skip_constraints, + install_airflow_with_constraints=install_airflow_with_constraints, airflow_constraints_mode=airflow_constraints_mode, airflow_constraints_location=airflow_constraints_location, airflow_constraints_reference=airflow_constraints_reference, @@ -404,7 +388,6 @@ def find_installation_spec( "\nDo not install airflow task-sdk. It should be installed automatically if needed by providers." ) airflow_task_sdk_distribution = None - airflow_task_sdk_constraints_location = None console.print( "\nDo not install airflow ctl. It should be installed automatically if needed by providers." @@ -436,7 +419,6 @@ def find_installation_spec( airflow_core_distribution=airflow_core_distribution_spec, airflow_constraints_location=airflow_constraints_location, airflow_task_sdk_distribution=airflow_task_sdk_distribution, - airflow_task_sdk_constraints_location=airflow_task_sdk_constraints_location, airflow_ctl_distribution=airflow_ctl_distribution, airflow_ctl_constraints_location=airflow_ctl_constraints_location, provider_distributions=provider_distributions_list, @@ -493,14 +475,6 @@ def find_installation_spec( envvar="AIRFLOW_EXTRAS", help="Airflow extras to install", ) -@click.option( - "--airflow-skip-constraints", - is_flag=True, - default=False, - show_default=True, - envvar="AIRFLOW_SKIP_CONSTRAINTS", - help="Skip constraints for airflow installation if set.", -) @click.option( "--default-constraints-branch", required=True, @@ -608,7 +582,6 @@ def install_airflow_and_providers( airflow_constraints_location: str, airflow_constraints_reference: str, airflow_extras: str, - airflow_skip_constraints: bool, default_constraints_branch: str, github_actions: bool, 
github_repository: str, @@ -630,7 +603,7 @@ def install_airflow_and_providers( airflow_constraints_location=airflow_constraints_location, airflow_constraints_reference=airflow_constraints_reference, airflow_extras=airflow_extras, - airflow_skip_constraints=airflow_skip_constraints, + install_airflow_with_constraints=install_airflow_with_constraints, default_constraints_branch=default_constraints_branch, github_repository=github_repository, install_selected_providers=install_selected_providers, @@ -643,163 +616,13 @@ def install_airflow_and_providers( use_airflow_version=use_airflow_version, use_distributions_from_dist=use_distributions_from_dist, ) - if installation_spec.airflow_distribution and install_airflow_with_constraints: - console.print("[bright_blue]Installing airflow with constraints") - console.print( - "[bright_blue]Airflow constraints location: ", installation_spec.airflow_constraints_location - ) - console.print("[bright_blue]Airflow distribution", installation_spec.airflow_distribution) - console.print("[bright_blue]Airflow core distribution", installation_spec.airflow_core_distribution) - console.print( - "[bright_blue]Airflow task-sdk constraints location: ", - installation_spec.airflow_task_sdk_constraints_location, - ) - console.print( - "[bright_blue]Airflow task-sdk distribution", installation_spec.airflow_task_sdk_distribution - ) - console.print( - "[bright_blue]Airflow ctl constraints location: ", - installation_spec.airflow_ctl_constraints_location, - ) - console.print("[bright_blue]Airflow ctl distribution", installation_spec.airflow_ctl_distribution) - base_install_airflow_cmd = [ - "/usr/local/bin/uv", - "pip", - "install", - ] - if installation_spec.pre_release: - console.print("[bright_blue]Allowing pre-release versions of airflow") - base_install_airflow_cmd.append("--pre") - base_install_airflow_cmd.append(installation_spec.airflow_distribution) - console.print( - f"\n[bright_blue]Installing airflow distribution: 
{installation_spec.airflow_distribution} with constraints" - ) - if installation_spec.airflow_core_distribution: - console.print( - f"\n[bright_blue]Installing airflow core distribution: {installation_spec.airflow_core_distribution} with constraints" - ) - base_install_airflow_cmd.append(installation_spec.airflow_core_distribution) - install_airflow_cmd = base_install_airflow_cmd.copy() - if installation_spec.airflow_constraints_location: - console.print(f"[bright_blue]Use constraints: {installation_spec.airflow_constraints_location}") - install_airflow_cmd.extend(["--constraint", installation_spec.airflow_constraints_location]) - console.print() - result = run_command(install_airflow_cmd, github_actions=github_actions, check=False) - if result.returncode != 0: - console.print( - "[warning]Installation with constraints failed - might be because pre-installed provider" - " has conflicting dependencies in PyPI. Falling back to a non-constraint installation." - ) - run_command(base_install_airflow_cmd, github_actions=github_actions, check=True) - if installation_spec.airflow_task_sdk_distribution: - base_install_airflow_task_sdk_cmd = base_install_airflow_cmd.copy() - base_install_airflow_task_sdk_cmd[-1] = installation_spec.airflow_task_sdk_distribution - console.print( - f"\n[bright_blue]Installing airflow task-sdk distribution: " - f"{installation_spec.airflow_task_sdk_distribution} with constraints" - ) - # if airflow is also being installed we should add airflow to the base_install_providers_cmd - # to avoid accidentally upgrading airflow to a version that is different than installed in the - # previous step - if installation_spec.airflow_distribution: - base_install_airflow_task_sdk_cmd.append(installation_spec.airflow_distribution) - install_airflow_task_sdk_cmd = base_install_airflow_task_sdk_cmd.copy() - if installation_spec.airflow_task_sdk_constraints_location: - console.print( - f"[bright_blue]Use constraints: 
{installation_spec.airflow_task_sdk_constraints_location}" - ) - install_airflow_task_sdk_cmd.extend( - ["--constraint", installation_spec.airflow_task_sdk_constraints_location] - ) - console.print() - run_command(install_airflow_task_sdk_cmd, github_actions=github_actions, check=True) - if result.returncode != 0: - console.print( - "[warning]Installation with constraints failed - might be because there are" - " conflicting dependencies in PyPI. Falling back to a non-constraint installation." - ) - run_command(base_install_airflow_cmd, github_actions=github_actions, check=True) + if install_airflow_with_constraints: + if installation_spec.airflow_distribution: + _install_only_airflow_airflow_core_task_sdk_with_constraints(installation_spec, github_actions) if installation_spec.airflow_ctl_distribution: - base_install_airflow_ctl_cmd = base_install_airflow_cmd.copy() - base_install_airflow_ctl_cmd[-1] = installation_spec.airflow_ctl_distribution - console.print( - f"\n[bright_blue]Installing airflow ctl distribution: " - f"{installation_spec.airflow_ctl_distribution} with constraints" - ) - # if airflow is also being installed we should add airflow to the base_install_providers_cmd - # to avoid accidentally upgrading airflow to a version that is different from installed in the - # previous step - if installation_spec.airflow_distribution: - base_install_airflow_ctl_cmd.append(installation_spec.airflow_distribution) - install_airflow_ctl_cmd = base_install_airflow_ctl_cmd.copy() - if installation_spec.airflow_ctl_constraints_location: - console.print( - f"[bright_blue]Use constraints: {installation_spec.airflow_ctl_constraints_location}" - ) - install_airflow_ctl_cmd.extend( - ["--constraint", installation_spec.airflow_ctl_constraints_location] - ) - console.print() - run_command(install_airflow_ctl_cmd, github_actions=github_actions, check=True) - if result.returncode != 0: - console.print( - "[warning]Installation with constraints failed - might be because there 
are" - " conflicting dependencies in PyPI. Falling back to a non-constraint installation." - ) - run_command(base_install_airflow_cmd, github_actions=github_actions, check=True) + _install_airflow_ctl_with_constraints(installation_spec, github_actions) if installation_spec.provider_distributions or not install_airflow_with_constraints: - console.print("[bright_blue]Installing airflow without constraints") - base_install_providers_cmd = [ - "/usr/local/bin/uv", - "pip", - "install", - ] - if installation_spec.pre_release: - base_install_providers_cmd.append("--pre") - if not install_airflow_with_constraints and installation_spec.airflow_distribution: - console.print( - f"\n[bright_blue]Installing airflow distribution: {installation_spec.airflow_distribution} without constraints" - ) - base_install_providers_cmd.append(installation_spec.airflow_distribution) - if installation_spec.airflow_core_distribution: - console.print( - f"\n[bright_blue]Installing airflow core distribution: {installation_spec.airflow_core_distribution} without constraints" - ) - base_install_providers_cmd.append(installation_spec.airflow_core_distribution) - if installation_spec.airflow_task_sdk_distribution: - console.print( - f"\n[bright_blue]Installing task-sdk distribution: {installation_spec.airflow_task_sdk_distribution} without constraints" - ) - base_install_providers_cmd.append(installation_spec.airflow_task_sdk_distribution) - console.print("\n[bright_blue]Installing provider distributions without constraints:") - for provider_package in sorted(installation_spec.provider_distributions): - console.print(f" {provider_package}") - console.print() - for provider_package in installation_spec.provider_distributions: - base_install_providers_cmd.append(provider_package) - install_providers_command = base_install_providers_cmd.copy() - # if airflow is also being installed we should add airflow to the base_install_providers_cmd - # to avoid accidentally upgrading airflow to a version that is 
different than installed in the - # previous step - if installation_spec.airflow_distribution: - base_install_providers_cmd.append(installation_spec.airflow_distribution) - if installation_spec.provider_constraints_location: - console.print( - f"[bright_blue]with constraints: {installation_spec.provider_constraints_location}\n" - ) - install_providers_command.extend( - ["--constraint", installation_spec.provider_constraints_location] - ) - console.print() - result = run_command(install_providers_command, github_actions=github_actions, check=False) - if result.returncode != 0: - console.print( - "[warning]Installation with constraints failed - might be because pre-installed provider" - " has conflicting dependencies in PyPI. Falling back to a non-constraint installation." - ) - run_command(base_install_providers_cmd, github_actions=github_actions, check=True) - else: - run_command(base_install_providers_cmd, github_actions=github_actions, check=True) + _install_airflow_and_optionally_providers_together(installation_spec, github_actions) if mount_sources in ["tests", "remove"]: console.print("[bright_blue]Uninstall editable packages installed in CI image") command = [ @@ -881,5 +704,144 @@ def install_airflow_and_providers( console.print("\n[green]Done!") +def _install_airflow_and_optionally_providers_together( + installation_spec: InstallationSpec, github_actions: bool +): + console.print("[bright_blue]Installing airflow and optionally providers together") + base_install_cmd = [ + "/usr/local/bin/uv", + "pip", + "install", + ] + if installation_spec.pre_release: + base_install_cmd.append("--pre") + if installation_spec.airflow_distribution: + console.print( + f"\n[bright_blue]Adding airflow distribution to installation: {installation_spec.airflow_distribution} " + ) + base_install_cmd.append(installation_spec.airflow_distribution) + if installation_spec.airflow_core_distribution: + console.print( + f"\n[bright_blue]Adding airflow core distribution to installation: 
{installation_spec.airflow_core_distribution}" + ) + base_install_cmd.append(installation_spec.airflow_core_distribution) + if installation_spec.airflow_task_sdk_distribution: + console.print( + f"\n[bright_blue]Adding task-sdk distribution to installation: {installation_spec.airflow_task_sdk_distribution}" + ) + base_install_cmd.append(installation_spec.airflow_task_sdk_distribution) + if installation_spec.airflow_ctl_distribution: + console.print( + f"\n[bright_blue]Adding airflow ctl distribution to installation: {installation_spec.airflow_ctl_distribution}" + ) + base_install_cmd.append(installation_spec.airflow_ctl_distribution) + console.print("\n[bright_blue]Adding provider distributions to installation:") + for provider_package in sorted(installation_spec.provider_distributions): + console.print(f" {provider_package}") + console.print() + for provider_package in installation_spec.provider_distributions: + base_install_cmd.append(provider_package) + install_providers_command = base_install_cmd.copy() + if installation_spec.provider_constraints_location: + console.print( + f"[bright_blue]Installing with provider constraints: {installation_spec.provider_constraints_location}\n" + ) + install_providers_command.extend(["--constraint", installation_spec.provider_constraints_location]) + console.print() + result = run_command(install_providers_command, github_actions=github_actions, check=False) + if result.returncode != 0: + console.print( + "[warning]Installation with constraints failed - might be because pre-installed provider" + " has conflicting dependencies in PyPI. Falling back to a non-constraint installation." 
+ ) + run_command(base_install_cmd, github_actions=github_actions, check=True) + else: + run_command(base_install_cmd, github_actions=github_actions, check=True) + + + def _install_airflow_ctl_with_constraints(installation_spec: InstallationSpec, github_actions: bool): + console.print( + f"\n[bright_blue]Installing airflow ctl distribution: " + f"{installation_spec.airflow_ctl_distribution} with constraints" + ) + base_install_airflow_ctl_cmd = [ + "/usr/local/bin/uv", + "pip", + "install", installation_spec.airflow_ctl_distribution, + ] + # if airflow is also being installed we should add airflow to the base_install_airflow_ctl_cmd + # to avoid accidentally upgrading airflow to a version that is different from installed in the + # previous step + if installation_spec.airflow_distribution: + base_install_airflow_ctl_cmd.append(installation_spec.airflow_distribution) + install_airflow_ctl_cmd = base_install_airflow_ctl_cmd.copy() + if installation_spec.airflow_ctl_constraints_location: + console.print(f"[bright_blue]Use constraints: {installation_spec.airflow_ctl_constraints_location}") + install_airflow_ctl_cmd.extend(["--constraint", installation_spec.airflow_ctl_constraints_location]) + console.print() + result = run_command(install_airflow_ctl_cmd, github_actions=github_actions, check=False) + if result.returncode != 0: + console.print( + "[warning]Installation with constraints failed - might be because there are" + " conflicting dependencies in PyPI. Falling back to a non-constraint installation." 
+ ) + run_command(base_install_airflow_ctl_cmd, github_actions=github_actions, check=True) + + +def _install_only_airflow_airflow_core_task_sdk_with_constraints( + installation_spec: InstallationSpec, github_actions: bool +) -> None: + console.print("[bright_blue]Installing airflow core, task-sdk with constraints") + console.print( + "[bright_blue]Airflow constraints location: ", installation_spec.airflow_constraints_location + ) + console.print("[bright_blue]Airflow distribution", installation_spec.airflow_distribution) + console.print("[bright_blue]Airflow core distribution", installation_spec.airflow_core_distribution) + console.print( + "[bright_blue]Airflow task-sdk distribution", installation_spec.airflow_task_sdk_distribution + ) + console.print( + "[bright_blue]Airflow ctl constraints location: ", + installation_spec.airflow_ctl_constraints_location, + ) + base_install_airflow_cmd = [ + "/usr/local/bin/uv", + "pip", + "install", + ] + if installation_spec.pre_release: + console.print("[bright_blue]Allowing pre-release versions of airflow") + base_install_airflow_cmd.append("--pre") + if installation_spec.airflow_distribution: + console.print( + f"\n[bright_blue]Installing airflow distribution: {installation_spec.airflow_distribution} with constraints" + ) + base_install_airflow_cmd.append(installation_spec.airflow_distribution) + if installation_spec.airflow_core_distribution: + console.print( + f"\n[bright_blue]Installing airflow core distribution: {installation_spec.airflow_core_distribution} with constraints" + ) + base_install_airflow_cmd.append(installation_spec.airflow_core_distribution) + if installation_spec.airflow_task_sdk_distribution: + base_install_airflow_cmd.append(installation_spec.airflow_task_sdk_distribution) + console.print( + f"\n[bright_blue]Installing airflow task-sdk distribution: " + f"{installation_spec.airflow_task_sdk_distribution} with constraints" + ) + console.print() + install_airflow_cmd = base_install_airflow_cmd.copy() + 
if installation_spec.airflow_constraints_location: + console.print(f"[bright_blue]Use constraints: {installation_spec.airflow_constraints_location}") + install_airflow_cmd.extend(["--constraint", installation_spec.airflow_constraints_location]) + console.print() + result = run_command(install_airflow_cmd, github_actions=github_actions, check=False) + if result.returncode != 0: + console.print( + "[warning]Installation with constraints failed - might be because pre-installed provider" + " has conflicting dependencies in PyPI. Falling back to a non-constraint installation." + ) + run_command(base_install_airflow_cmd, github_actions=github_actions, check=True) + + if __name__ == "__main__": install_airflow_and_providers() diff --git a/scripts/in_container/run_capture_airflowctl_help.py b/scripts/in_container/run_capture_airflowctl_help.py new file mode 100644 index 0000000000000..45e60a2b629c5 --- /dev/null +++ b/scripts/in_container/run_capture_airflowctl_help.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python3 +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +import hashlib +import os +import sys +from pathlib import Path + +from airflowctl import __file__ as AIRFLOW_CTL_SRC_PATH +from rich.console import Console +from rich.terminal_theme import SVG_EXPORT_THEME +from rich.text import Text + +sys.path.insert(0, str(Path(__file__).parent.resolve())) +AIRFLOW_ROOT_PATH = Path(AIRFLOW_CTL_SRC_PATH).parents[2] +AIRFLOW_CTL_SOURCES_PATH = AIRFLOW_ROOT_PATH / "src" + +sys.path.insert(0, str(Path(__file__).parent.resolve())) # make sure common_precommit_utils is imported +AIRFLOWCTL_IMAGES_PATH = AIRFLOW_ROOT_PATH / "docs/images/" +HASH_FILE = AIRFLOW_ROOT_PATH / "docs/images/" / "command_hashes.txt" +COMMANDS = [ + "", # for `airflowctl -h`, main help + "assets", + "auth", + "backfills", + "config", + "connections", + "dag", + "dagrun", + "jobs", + "pools", + "providers", + "variables", + "version", +] + +SUBCOMMANDS = [ + "auth login", +] + + +# Get new hashes +def get_airflowctl_command_hash_dict(commands): + hash_dict = {} + for command in commands: + help_text = os.popen(f"python {AIRFLOW_CTL_SOURCES_PATH}/airflowctl/__main__.py {command} -h").read() + hash_dict[command if command != "" else "main"] = hashlib.md5(help_text.encode("utf-8")).hexdigest() + return hash_dict + + +def regenerate_help_images_for_all_airflowctl_commands(commands: list[str]) -> int: + hash_file = AIRFLOWCTL_IMAGES_PATH / "command_hashes.txt" + os.makedirs(AIRFLOWCTL_IMAGES_PATH, exist_ok=True) + console = Console(color_system="standard", record=True) + env = os.environ.copy() + env["TERM"] = "xterm-256color" + + # Load old hashes if present + old_hash_dict = {} + if hash_file.exists(): + for line in hash_file.read_text().splitlines(): + if line.strip(): + cmd, hash_val = line.split(":", 1) + old_hash_dict[cmd] = hash_val + + new_hash_dict = get_airflowctl_command_hash_dict(commands) + + # Check for changes + changed_commands = [] + for command in commands: + command_key = command + if command == "": + 
command_key = "main" + if old_hash_dict.get(command_key) != new_hash_dict[command_key]: + changed_commands.append(command) + + if not changed_commands: + console.print("[bright_blue]The hash dumps old/new are the same. Returning with return code 0.") + return 0 + + # Generate SVGs for changed commands + for command in changed_commands: + help_text = os.popen(f"airflowctl {command} -h").read() + text_obj = Text.from_ansi(help_text) + # Clear previous record, print the text, then export SVG + console.clear() + console.print(text_obj) + svg_content = console.export_svg(title=f"Command: {command or 'main'}", theme=SVG_EXPORT_THEME) + output_file = AIRFLOWCTL_IMAGES_PATH / f"output_{command.replace(' ', '_') or 'main'}.svg" + with open(output_file, "w") as svg_file: + svg_file.write(svg_content) + + # Write new hashes + with open(hash_file, "w") as f: + for cmd, hash_val in new_hash_dict.items(): + f.write(f"{cmd}:{hash_val}\n") + console.print("[info]New hash of airflowctl commands written") + + return 0 + + +try: + regenerate_help_images_for_all_airflowctl_commands(COMMANDS + SUBCOMMANDS) +except Exception as e: + print(f"Error: {e}") + sys.exit(1) diff --git a/scripts/in_container/run_check_imports_in_providers.py b/scripts/in_container/run_check_imports_in_providers.py new file mode 100644 index 0000000000000..974a9354a087c --- /dev/null +++ b/scripts/in_container/run_check_imports_in_providers.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import json +import os.path +import subprocess +import sys +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).parent.resolve())) +from in_container_utils import console, get_provider_base_dir_from_path, get_provider_id_from_path + +errors_found = False + + +def check_imports(): + global errors_found + cmd = [ + "ruff", + "analyze", + "graph", + "--python", + "/usr/local/bin/python", + ] + console.print("Cmd", cmd) + import_tree_str = subprocess.check_output(cmd) + import_tree = json.loads(import_tree_str) + # Uncomment these if you want to debug strange dependencies and see if ruff gets it right + console.print("Dependencies discovered by ruff:") + console.print(import_tree) + + for importing_file in sys.argv[1:]: + if not importing_file.startswith("providers/"): + console.print(f"[yellow]Skipping non-provider file: {importing_file}") + continue + importing_file_path = Path(importing_file) + console.print(importing_file_path) + imported_files_array = import_tree.get(importing_file, None) + if imported_files_array is None: + continue + imported_file_paths = [Path(file) for file in imported_files_array] + for imported_file_path in imported_file_paths: + if imported_file_path.name == "version_compat.py": + # Note - this will check also imports from other places - not only from providers + # Which means that import from tests_common, and airflow will be also banned + common_path = os.path.commonpath([importing_file, imported_file_path.as_posix()]) + imported_file_parent_dir = 
imported_file_path.parent.as_posix() + if common_path != imported_file_parent_dir: + provider_id = get_provider_id_from_path(importing_file_path) + provider_dir = get_provider_base_dir_from_path(importing_file_path) + console.print( + f"\n[red]Invalid import of `version_compat` module in provider {provider_id} in:\n" + ) + console.print(f"[yellow]{importing_file_path}") + console.print( + f"\n[bright_blue]The AIRFLOW_V_X_Y_PLUS import should be " + f"from the {provider_id} provider root directory ({provider_dir}), but it is currently from:" + ) + console.print(f"\n[yellow]{imported_file_path}\n") + console.print( + f"1. Copy `version_compat`.py to `{provider_dir}/version_compat.py` if not there.\n" + f"2. Import the version constants you need as:\n\n" + f"[yellow]from airflow.providers.{provider_id}.version_compat import ...[/]\n" + f"\n" + ) + errors_found = True + + +check_imports() + +if errors_found: + console.print("\n[red]Errors found in imports![/]\n") + sys.exit(1) +else: + console.print("\n[green]All version_compat imports are correct![/]\n") diff --git a/scripts/in_container/run_ci_tests.sh b/scripts/in_container/run_ci_tests.sh index a93d1b4afddad..1774aef9eebe3 100755 --- a/scripts/in_container/run_ci_tests.sh +++ b/scripts/in_container/run_ci_tests.sh @@ -23,7 +23,6 @@ echo "Starting the tests with those pytest arguments:" "${@}" echo set +e - pytest "${@}" RES=$? diff --git a/scripts/in_container/run_docs_build.sh b/scripts/in_container/run_docs_build.sh index 39b13f0bde7ba..bacfb75efe9be 100755 --- a/scripts/in_container/run_docs_build.sh +++ b/scripts/in_container/run_docs_build.sh @@ -18,6 +18,8 @@ # shellcheck source=scripts/in_container/_in_container_script_init.sh . 
"$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" +git config --global --add safe.directory /opt/airflow + cd "${AIRFLOW_SOURCES}" || exit 1 python -m docs.build_docs "${@}" diff --git a/scripts/in_container/run_generate_migration.sh b/scripts/in_container/run_generate_migration.sh index 46560c9112272..85b5d485fb88e 100755 --- a/scripts/in_container/run_generate_migration.sh +++ b/scripts/in_container/run_generate_migration.sh @@ -19,7 +19,7 @@ . "$(dirname "${BASH_SOURCE[0]}")/_in_container_script_init.sh" cd "${AIRFLOW_SOURCES}" || exit 1 -cd "airflow" || exit 1 +cd "airflow-core/src/airflow" || exit 1 airflow db reset -y airflow db downgrade -n 2.10.3 -y airflow db migrate -r heads diff --git a/scripts/in_container/run_provider_yaml_files_check.py b/scripts/in_container/run_provider_yaml_files_check.py index d1d11cb4d9c14..3dab9611ea508 100755 --- a/scripts/in_container/run_provider_yaml_files_check.py +++ b/scripts/in_container/run_provider_yaml_files_check.py @@ -61,6 +61,9 @@ "airflow.providers.tabular.hooks.tabular", "airflow.providers.yandex.hooks.yandexcloud_dataproc", "airflow.providers.yandex.operators.yandexcloud_dataproc", + "airflow.providers.google.cloud.hooks.datacatalog", + "airflow.providers.google.cloud.operators.datacatalog", + "airflow.providers.google.cloud.links.datacatalog", ] KNOWN_DEPRECATED_CLASSES = [ @@ -78,7 +81,7 @@ PROVIDER_DATA_SCHEMA_PATH = AIRFLOW_CORE_SOURCES_PATH.joinpath("airflow", "provider.yaml.schema.json") PROVIDER_ISSUE_TEMPLATE_PATH = AIRFLOW_ROOT_PATH.joinpath( - ".github", "ISSUE_TEMPLATE", "airflow_providers_bug_report.yml" + ".github", "ISSUE_TEMPLATE", "3-airflow_providers_bug_report.yml" ) CORE_INTEGRATIONS = ["SQL", "Local"]